mirror of
https://github.com/BerriAI/litellm.git
synced 2025-12-06 11:33:26 +08:00
When Gemini image generation models return `text_tokens=0` with `image_tokens > 0`, the cost calculator was assuming no token breakdown existed and treating all completion tokens as text tokens, resulting in ~10x underestimation of costs. Changes: - Fix cost calculation logic to respect token breakdown when image/audio/reasoning tokens are present, even if text_tokens=0 - Add `output_cost_per_image_token` pricing for gemini-3-pro-image-preview models - Add test case reproducing the issue - Add documentation explaining image token pricing Fixes #17410
27823 lines
970 KiB
JSON
27823 lines
970 KiB
JSON
{
|
|
"sample_spec": {
|
|
"code_interpreter_cost_per_session": 0.0,
|
|
"computer_use_input_cost_per_1k_tokens": 0.0,
|
|
"computer_use_output_cost_per_1k_tokens": 0.0,
|
|
"deprecation_date": "date when the model becomes deprecated in the format YYYY-MM-DD",
|
|
"file_search_cost_per_1k_calls": 0.0,
|
|
"file_search_cost_per_gb_per_day": 0.0,
|
|
"input_cost_per_audio_token": 0.0,
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "one of https://docs.litellm.ai/docs/providers",
|
|
"max_input_tokens": "max input tokens, if the provider specifies it. if not default to max_tokens",
|
|
"max_output_tokens": "max output tokens, if the provider specifies it. if not default to max_tokens",
|
|
"max_tokens": "LEGACY parameter. set to max_output_tokens if provider specifies it. IF not set to max_input_tokens, if provider specifies it.",
|
|
"mode": "one of: chat, embedding, completion, image_generation, audio_transcription, audio_speech, image_generation, moderation, rerank, search",
|
|
"output_cost_per_reasoning_token": 0.0,
|
|
"output_cost_per_token": 0.0,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.0,
|
|
"search_context_size_low": 0.0,
|
|
"search_context_size_medium": 0.0
|
|
},
|
|
"supported_regions": [
|
|
"global",
|
|
"us-west-2",
|
|
"eu-west-1",
|
|
"ap-southeast-1",
|
|
"ap-northeast-1"
|
|
],
|
|
"supports_audio_input": true,
|
|
"supports_audio_output": true,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true,
|
|
"vector_store_cost_per_gb_per_day": 0.0
|
|
},
|
|
"1024-x-1024/50-steps/bedrock/amazon.nova-canvas-v1:0": {
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 2600,
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.06
|
|
},
|
|
"1024-x-1024/50-steps/stability.stable-diffusion-xl-v1": {
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 77,
|
|
"max_tokens": 77,
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.04
|
|
},
|
|
"1024-x-1024/dall-e-2": {
|
|
"input_cost_per_pixel": 1.9e-08,
|
|
"litellm_provider": "openai",
|
|
"mode": "image_generation",
|
|
"output_cost_per_pixel": 0.0
|
|
},
|
|
"1024-x-1024/max-steps/stability.stable-diffusion-xl-v1": {
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 77,
|
|
"max_tokens": 77,
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.08
|
|
},
|
|
"256-x-256/dall-e-2": {
|
|
"input_cost_per_pixel": 2.4414e-07,
|
|
"litellm_provider": "openai",
|
|
"mode": "image_generation",
|
|
"output_cost_per_pixel": 0.0
|
|
},
|
|
"512-x-512/50-steps/stability.stable-diffusion-xl-v0": {
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 77,
|
|
"max_tokens": 77,
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.018
|
|
},
|
|
"512-x-512/dall-e-2": {
|
|
"input_cost_per_pixel": 6.86e-08,
|
|
"litellm_provider": "openai",
|
|
"mode": "image_generation",
|
|
"output_cost_per_pixel": 0.0
|
|
},
|
|
"512-x-512/max-steps/stability.stable-diffusion-xl-v0": {
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 77,
|
|
"max_tokens": 77,
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.036
|
|
},
|
|
"ai21.j2-mid-v1": {
|
|
"input_cost_per_token": 1.25e-05,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 8191,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.25e-05
|
|
},
|
|
"ai21.j2-ultra-v1": {
|
|
"input_cost_per_token": 1.88e-05,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 8191,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.88e-05
|
|
},
|
|
"ai21.jamba-1-5-large-v1:0": {
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 256000,
|
|
"max_output_tokens": 256000,
|
|
"max_tokens": 256000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8e-06
|
|
},
|
|
"ai21.jamba-1-5-mini-v1:0": {
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 256000,
|
|
"max_output_tokens": 256000,
|
|
"max_tokens": 256000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-07
|
|
},
|
|
"ai21.jamba-instruct-v1:0": {
|
|
"input_cost_per_token": 5e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 70000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7e-07,
|
|
"supports_system_messages": true
|
|
},
|
|
"aiml/dall-e-2": {
|
|
"litellm_provider": "aiml",
|
|
"metadata": {
|
|
"notes": "DALL-E 2 via AI/ML API - Reliable text-to-image generation"
|
|
},
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.021,
|
|
"source": "https://docs.aimlapi.com/",
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"aiml/dall-e-3": {
|
|
"litellm_provider": "aiml",
|
|
"metadata": {
|
|
"notes": "DALL-E 3 via AI/ML API - High-quality text-to-image generation"
|
|
},
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.042,
|
|
"source": "https://docs.aimlapi.com/",
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"aiml/flux-pro": {
|
|
"litellm_provider": "aiml",
|
|
"metadata": {
|
|
"notes": "Flux Dev - Development version optimized for experimentation"
|
|
},
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.053,
|
|
"source": "https://docs.aimlapi.com/",
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"aiml/flux-pro/v1.1": {
|
|
"litellm_provider": "aiml",
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.042,
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"aiml/flux-pro/v1.1-ultra": {
|
|
"litellm_provider": "aiml",
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.063,
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"aiml/flux-realism": {
|
|
"litellm_provider": "aiml",
|
|
"metadata": {
|
|
"notes": "Flux Pro - Professional-grade image generation model"
|
|
},
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.037,
|
|
"source": "https://docs.aimlapi.com/",
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"aiml/flux/dev": {
|
|
"litellm_provider": "aiml",
|
|
"metadata": {
|
|
"notes": "Flux Dev - Development version optimized for experimentation"
|
|
},
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.026,
|
|
"source": "https://docs.aimlapi.com/",
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"aiml/flux/kontext-max/text-to-image": {
|
|
"litellm_provider": "aiml",
|
|
"metadata": {
|
|
"notes": "Flux Pro v1.1 - Enhanced version with improved capabilities and 6x faster inference speed"
|
|
},
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.084,
|
|
"source": "https://docs.aimlapi.com/",
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"aiml/flux/kontext-pro/text-to-image": {
|
|
"litellm_provider": "aiml",
|
|
"metadata": {
|
|
"notes": "Flux Pro v1.1 - Enhanced version with improved capabilities and 6x faster inference speed"
|
|
},
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.042,
|
|
"source": "https://docs.aimlapi.com/",
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"aiml/flux/schnell": {
|
|
"litellm_provider": "aiml",
|
|
"metadata": {
|
|
"notes": "Flux Schnell - Fast generation model optimized for speed"
|
|
},
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.003,
|
|
"source": "https://docs.aimlapi.com/",
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"amazon.nova-canvas-v1:0": {
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 2600,
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.06
|
|
},
|
|
"amazon.nova-lite-v1:0": {
|
|
"input_cost_per_token": 6e-08,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 300000,
|
|
"max_output_tokens": 10000,
|
|
"max_tokens": 10000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.4e-07,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_vision": true
|
|
},
|
|
"amazon.nova-2-lite-v1:0": {
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 1000000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 64000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.5e-06,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_video_input": true,
|
|
"supports_vision": true
|
|
},
|
|
"apac.amazon.nova-2-lite-v1:0": {
|
|
"input_cost_per_token": 6e-08,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 1000000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 64000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.75e-06,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_video_input": true,
|
|
"supports_vision": true
|
|
},
|
|
"eu.amazon.nova-2-lite-v1:0": {
|
|
"input_cost_per_token": 6e-08,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 1000000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 64000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.75e-06,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_video_input": true,
|
|
"supports_vision": true
|
|
},
|
|
"us.amazon.nova-2-lite-v1:0": {
|
|
"input_cost_per_token": 6e-08,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 1000000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 64000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.75e-06,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_video_input": true,
|
|
"supports_vision": true
|
|
},
|
|
|
|
"amazon.nova-micro-v1:0": {
|
|
"input_cost_per_token": 3.5e-08,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 10000,
|
|
"max_tokens": 10000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.4e-07,
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true
|
|
},
|
|
"amazon.nova-pro-v1:0": {
|
|
"input_cost_per_token": 8e-07,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 300000,
|
|
"max_output_tokens": 10000,
|
|
"max_tokens": 10000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3.2e-06,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_vision": true
|
|
},
|
|
"amazon.rerank-v1:0": {
|
|
"input_cost_per_query": 0.001,
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "bedrock",
|
|
"max_document_chunks_per_query": 100,
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 32000,
|
|
"max_query_tokens": 32000,
|
|
"max_tokens": 32000,
|
|
"max_tokens_per_document_chunk": 512,
|
|
"mode": "rerank",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"amazon.titan-embed-image-v1": {
|
|
"input_cost_per_image": 6e-05,
|
|
"input_cost_per_token": 8e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 128,
|
|
"max_tokens": 128,
|
|
"metadata": {
|
|
"notes": "'supports_image_input' is a deprecated field. Use 'supports_embedding_image_input' instead."
|
|
},
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0,
|
|
"output_vector_size": 1024,
|
|
"source": "https://us-east-1.console.aws.amazon.com/bedrock/home?region=us-east-1#/providers?model=amazon.titan-image-generator-v1",
|
|
"supports_embedding_image_input": true,
|
|
"supports_image_input": true
|
|
},
|
|
"amazon.titan-embed-text-v1": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0,
|
|
"output_vector_size": 1536
|
|
},
|
|
"amazon.titan-embed-text-v2:0": {
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0,
|
|
"output_vector_size": 1024
|
|
},
|
|
"amazon.titan-image-generator-v1": {
|
|
"input_cost_per_image": 0.0,
|
|
"output_cost_per_image": 0.008,
|
|
"output_cost_per_image_premium_image": 0.01,
|
|
"output_cost_per_image_above_512_and_512_pixels": 0.01,
|
|
"output_cost_per_image_above_512_and_512_pixels_and_premium_image": 0.012,
|
|
"litellm_provider": "bedrock",
|
|
"mode": "image_generation"
|
|
},
|
|
"amazon.titan-image-generator-v2": {
|
|
"input_cost_per_image": 0.0,
|
|
"output_cost_per_image": 0.008,
|
|
"output_cost_per_image_premium_image": 0.01,
|
|
"output_cost_per_image_above_1024_and_1024_pixels": 0.01,
|
|
"output_cost_per_image_above_1024_and_1024_pixels_and_premium_image": 0.012,
|
|
"litellm_provider": "bedrock",
|
|
"mode": "image_generation"
|
|
},
|
|
"amazon.titan-image-generator-v2:0": {
|
|
"input_cost_per_image": 0.0,
|
|
"output_cost_per_image": 0.008,
|
|
"output_cost_per_image_premium_image": 0.01,
|
|
"output_cost_per_image_above_1024_and_1024_pixels": 0.01,
|
|
"output_cost_per_image_above_1024_and_1024_pixels_and_premium_image": 0.012,
|
|
"litellm_provider": "bedrock",
|
|
"mode": "image_generation"
|
|
},
|
|
"twelvelabs.marengo-embed-2-7-v1:0": {
|
|
"input_cost_per_token": 7e-05,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 77,
|
|
"max_tokens": 77,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0,
|
|
"output_vector_size": 1024,
|
|
"supports_embedding_image_input": true,
|
|
"supports_image_input": true
|
|
},
|
|
"us.twelvelabs.marengo-embed-2-7-v1:0": {
|
|
"input_cost_per_token": 7e-05,
|
|
"input_cost_per_video_per_second": 0.0007,
|
|
"input_cost_per_audio_per_second": 0.00014,
|
|
"input_cost_per_image": 0.0001,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 77,
|
|
"max_tokens": 77,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0,
|
|
"output_vector_size": 1024,
|
|
"supports_embedding_image_input": true,
|
|
"supports_image_input": true
|
|
},
|
|
"eu.twelvelabs.marengo-embed-2-7-v1:0": {
|
|
"input_cost_per_token": 7e-05,
|
|
"input_cost_per_video_per_second": 0.0007,
|
|
"input_cost_per_audio_per_second": 0.00014,
|
|
"input_cost_per_image": 0.0001,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 77,
|
|
"max_tokens": 77,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0,
|
|
"output_vector_size": 1024,
|
|
"supports_embedding_image_input": true,
|
|
"supports_image_input": true
|
|
},
|
|
"twelvelabs.pegasus-1-2-v1:0": {
|
|
"input_cost_per_video_per_second": 0.00049,
|
|
"output_cost_per_token": 7.5e-06,
|
|
"litellm_provider": "bedrock",
|
|
"mode": "chat",
|
|
"supports_video_input": true
|
|
},
|
|
"us.twelvelabs.pegasus-1-2-v1:0": {
|
|
"input_cost_per_video_per_second": 0.00049,
|
|
"output_cost_per_token": 7.5e-06,
|
|
"litellm_provider": "bedrock",
|
|
"mode": "chat",
|
|
"supports_video_input": true
|
|
},
|
|
"eu.twelvelabs.pegasus-1-2-v1:0": {
|
|
"input_cost_per_video_per_second": 0.00049,
|
|
"output_cost_per_token": 7.5e-06,
|
|
"litellm_provider": "bedrock",
|
|
"mode": "chat",
|
|
"supports_video_input": true
|
|
},
|
|
"amazon.titan-text-express-v1": {
|
|
"input_cost_per_token": 1.3e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 42000,
|
|
"max_output_tokens": 8000,
|
|
"max_tokens": 8000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.7e-06
|
|
},
|
|
"amazon.titan-text-lite-v1": {
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 42000,
|
|
"max_output_tokens": 4000,
|
|
"max_tokens": 4000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-07
|
|
},
|
|
"amazon.titan-text-premier-v1:0": {
|
|
"input_cost_per_token": 5e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 42000,
|
|
"max_output_tokens": 32000,
|
|
"max_tokens": 32000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-06
|
|
},
|
|
"anthropic.claude-3-5-haiku-20241022-v1:0": {
|
|
"cache_creation_input_token_cost": 1e-06,
|
|
"cache_read_input_token_cost": 8e-08,
|
|
"input_cost_per_token": 8e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-06,
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"anthropic.claude-haiku-4-5-20251001-v1:0": {
|
|
"cache_creation_input_token_cost": 1.25e-06,
|
|
"cache_read_input_token_cost": 1e-07,
|
|
"input_cost_per_token": 1e-06,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 64000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5e-06,
|
|
"source": "https://aws.amazon.com/about-aws/whats-new/2025/10/claude-4-5-haiku-anthropic-amazon-bedrock",
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 346
|
|
},
|
|
"anthropic.claude-haiku-4-5@20251001": {
|
|
"cache_creation_input_token_cost": 1.25e-06,
|
|
"cache_read_input_token_cost": 1e-07,
|
|
"input_cost_per_token": 1e-06,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 64000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5e-06,
|
|
"source": "https://aws.amazon.com/about-aws/whats-new/2025/10/claude-4-5-haiku-anthropic-amazon-bedrock",
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 346
|
|
},
|
|
"anthropic.claude-3-5-sonnet-20240620-v1:0": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"anthropic.claude-3-5-sonnet-20241022-v2:0": {
|
|
"cache_creation_input_token_cost": 3.75e-06,
|
|
"cache_read_input_token_cost": 3e-07,
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"anthropic.claude-3-7-sonnet-20240620-v1:0": {
|
|
"cache_creation_input_token_cost": 4.5e-06,
|
|
"cache_read_input_token_cost": 3.6e-07,
|
|
"input_cost_per_token": 3.6e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.8e-05,
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"anthropic.claude-3-7-sonnet-20250219-v1:0": {
|
|
"cache_creation_input_token_cost": 3.75e-06,
|
|
"cache_read_input_token_cost": 3e-07,
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"anthropic.claude-3-haiku-20240307-v1:0": {
|
|
"input_cost_per_token": 2.5e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.25e-06,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"anthropic.claude-3-opus-20240229-v1:0": {
|
|
"input_cost_per_token": 1.5e-05,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.5e-05,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"anthropic.claude-3-sonnet-20240229-v1:0": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"anthropic.claude-instant-v1": {
|
|
"input_cost_per_token": 8e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 100000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.4e-06,
|
|
"supports_tool_choice": true
|
|
},
|
|
"anthropic.claude-opus-4-1-20250805-v1:0": {
|
|
"cache_creation_input_token_cost": 1.875e-05,
|
|
"cache_read_input_token_cost": 1.5e-06,
|
|
"input_cost_per_token": 1.5e-05,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 32000,
|
|
"max_tokens": 32000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.5e-05,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.01,
|
|
"search_context_size_low": 0.01,
|
|
"search_context_size_medium": 0.01
|
|
},
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 159
|
|
},
|
|
"anthropic.claude-opus-4-20250514-v1:0": {
|
|
"cache_creation_input_token_cost": 1.875e-05,
|
|
"cache_read_input_token_cost": 1.5e-06,
|
|
"input_cost_per_token": 1.5e-05,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 32000,
|
|
"max_tokens": 32000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.5e-05,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.01,
|
|
"search_context_size_low": 0.01,
|
|
"search_context_size_medium": 0.01
|
|
},
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 159
|
|
},
|
|
"anthropic.claude-opus-4-5-20251101-v1:0": {
|
|
"cache_creation_input_token_cost": 6.25e-06,
|
|
"cache_read_input_token_cost": 5e-07,
|
|
"input_cost_per_token": 5e-06,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 64000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.5e-05,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.01,
|
|
"search_context_size_low": 0.01,
|
|
"search_context_size_medium": 0.01
|
|
},
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 159
|
|
},
|
|
"anthropic.claude-sonnet-4-20250514-v1:0": {
|
|
"cache_creation_input_token_cost": 3.75e-06,
|
|
"cache_read_input_token_cost": 3e-07,
|
|
"input_cost_per_token": 3e-06,
|
|
"input_cost_per_token_above_200k_tokens": 6e-06,
|
|
"output_cost_per_token_above_200k_tokens": 2.25e-05,
|
|
"cache_creation_input_token_cost_above_200k_tokens": 7.5e-06,
|
|
"cache_read_input_token_cost_above_200k_tokens": 6e-07,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 1000000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 64000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.01,
|
|
"search_context_size_low": 0.01,
|
|
"search_context_size_medium": 0.01
|
|
},
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 159
|
|
},
|
|
"anthropic.claude-sonnet-4-5-20250929-v1:0": {
|
|
"cache_creation_input_token_cost": 3.75e-06,
|
|
"cache_read_input_token_cost": 3e-07,
|
|
"input_cost_per_token": 3e-06,
|
|
"input_cost_per_token_above_200k_tokens": 6e-06,
|
|
"output_cost_per_token_above_200k_tokens": 2.25e-05,
|
|
"cache_creation_input_token_cost_above_200k_tokens": 7.5e-06,
|
|
"cache_read_input_token_cost_above_200k_tokens": 6e-07,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 64000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.01,
|
|
"search_context_size_low": 0.01,
|
|
"search_context_size_medium": 0.01
|
|
},
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 159
|
|
},
|
|
"anthropic.claude-v1": {
|
|
"input_cost_per_token": 8e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 100000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.4e-05
|
|
},
|
|
"anthropic.claude-v2:1": {
|
|
"input_cost_per_token": 8e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 100000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.4e-05,
|
|
"supports_tool_choice": true
|
|
},
|
|
"anyscale/HuggingFaceH4/zephyr-7b-beta": {
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "anyscale",
|
|
"max_input_tokens": 16384,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-07
|
|
},
|
|
"anyscale/codellama/CodeLlama-34b-Instruct-hf": {
|
|
"input_cost_per_token": 1e-06,
|
|
"litellm_provider": "anyscale",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-06
|
|
},
|
|
"anyscale/codellama/CodeLlama-70b-Instruct-hf": {
|
|
"input_cost_per_token": 1e-06,
|
|
"litellm_provider": "anyscale",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-06,
|
|
"source": "https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/codellama-CodeLlama-70b-Instruct-hf"
|
|
},
|
|
"anyscale/google/gemma-7b-it": {
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "anyscale",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-07,
|
|
"source": "https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/google-gemma-7b-it"
|
|
},
|
|
"anyscale/meta-llama/Llama-2-13b-chat-hf": {
|
|
"input_cost_per_token": 2.5e-07,
|
|
"litellm_provider": "anyscale",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.5e-07
|
|
},
|
|
"anyscale/meta-llama/Llama-2-70b-chat-hf": {
|
|
"input_cost_per_token": 1e-06,
|
|
"litellm_provider": "anyscale",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-06
|
|
},
|
|
"anyscale/meta-llama/Llama-2-7b-chat-hf": {
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "anyscale",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-07
|
|
},
|
|
"anyscale/meta-llama/Meta-Llama-3-70B-Instruct": {
|
|
"input_cost_per_token": 1e-06,
|
|
"litellm_provider": "anyscale",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-06,
|
|
"source": "https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/meta-llama-Meta-Llama-3-70B-Instruct"
|
|
},
|
|
"anyscale/meta-llama/Meta-Llama-3-8B-Instruct": {
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "anyscale",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-07,
|
|
"source": "https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/meta-llama-Meta-Llama-3-8B-Instruct"
|
|
},
|
|
"anyscale/mistralai/Mistral-7B-Instruct-v0.1": {
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "anyscale",
|
|
"max_input_tokens": 16384,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-07,
|
|
"source": "https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/mistralai-Mistral-7B-Instruct-v0.1",
|
|
"supports_function_calling": true
|
|
},
|
|
"anyscale/mistralai/Mixtral-8x22B-Instruct-v0.1": {
|
|
"input_cost_per_token": 9e-07,
|
|
"litellm_provider": "anyscale",
|
|
"max_input_tokens": 65536,
|
|
"max_output_tokens": 65536,
|
|
"max_tokens": 65536,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 9e-07,
|
|
"source": "https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/mistralai-Mixtral-8x22B-Instruct-v0.1",
|
|
"supports_function_calling": true
|
|
},
|
|
"anyscale/mistralai/Mixtral-8x7B-Instruct-v0.1": {
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "anyscale",
|
|
"max_input_tokens": 16384,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-07,
|
|
"source": "https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/mistralai-Mixtral-8x7B-Instruct-v0.1",
|
|
"supports_function_calling": true
|
|
},
|
|
"apac.amazon.nova-lite-v1:0": {
|
|
"input_cost_per_token": 6.3e-08,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 300000,
|
|
"max_output_tokens": 10000,
|
|
"max_tokens": 10000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.52e-07,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_vision": true
|
|
},
|
|
"apac.amazon.nova-micro-v1:0": {
|
|
"input_cost_per_token": 3.7e-08,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 10000,
|
|
"max_tokens": 10000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.48e-07,
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true
|
|
},
|
|
"apac.amazon.nova-pro-v1:0": {
|
|
"input_cost_per_token": 8.4e-07,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 300000,
|
|
"max_output_tokens": 10000,
|
|
"max_tokens": 10000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3.36e-06,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_vision": true
|
|
},
|
|
"apac.anthropic.claude-3-5-sonnet-20240620-v1:0": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"apac.anthropic.claude-3-5-sonnet-20241022-v2:0": {
|
|
"cache_creation_input_token_cost": 3.75e-06,
|
|
"cache_read_input_token_cost": 3e-07,
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"apac.anthropic.claude-3-haiku-20240307-v1:0": {
|
|
"input_cost_per_token": 2.5e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.25e-06,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"apac.anthropic.claude-haiku-4-5-20251001-v1:0": {
|
|
"cache_creation_input_token_cost": 1.375e-06,
|
|
"cache_read_input_token_cost": 1.1e-07,
|
|
"input_cost_per_token": 1.1e-06,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 64000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5.5e-06,
|
|
"source": "https://aws.amazon.com/about-aws/whats-new/2025/10/claude-4-5-haiku-anthropic-amazon-bedrock",
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 346
|
|
},
|
|
"apac.anthropic.claude-3-sonnet-20240229-v1:0": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"apac.anthropic.claude-sonnet-4-20250514-v1:0": {
|
|
"cache_creation_input_token_cost": 3.75e-06,
|
|
"cache_read_input_token_cost": 3e-07,
|
|
"input_cost_per_token": 3e-06,
|
|
"input_cost_per_token_above_200k_tokens": 6e-06,
|
|
"output_cost_per_token_above_200k_tokens": 2.25e-05,
|
|
"cache_creation_input_token_cost_above_200k_tokens": 7.5e-06,
|
|
"cache_read_input_token_cost_above_200k_tokens": 6e-07,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 1000000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 64000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.01,
|
|
"search_context_size_low": 0.01,
|
|
"search_context_size_medium": 0.01
|
|
},
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 159
|
|
},
|
|
"assemblyai/best": {
|
|
"input_cost_per_second": 3.333e-05,
|
|
"litellm_provider": "assemblyai",
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_second": 0.0
|
|
},
|
|
"assemblyai/nano": {
|
|
"input_cost_per_second": 0.00010278,
|
|
"litellm_provider": "assemblyai",
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_second": 0.0
|
|
},
|
|
"au.anthropic.claude-sonnet-4-5-20250929-v1:0": {
|
|
"cache_creation_input_token_cost": 4.125e-06,
|
|
"cache_read_input_token_cost": 3.3e-07,
|
|
"input_cost_per_token": 3.3e-06,
|
|
"input_cost_per_token_above_200k_tokens": 6.6e-06,
|
|
"output_cost_per_token_above_200k_tokens": 2.475e-05,
|
|
"cache_creation_input_token_cost_above_200k_tokens": 8.25e-06,
|
|
"cache_read_input_token_cost_above_200k_tokens": 6.6e-07,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 64000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.65e-05,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.01,
|
|
"search_context_size_low": 0.01,
|
|
"search_context_size_medium": 0.01
|
|
},
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 346
|
|
},
|
|
"azure/ada": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"azure/codex-mini": {
|
|
"cache_read_input_token_cost": 3.75e-07,
|
|
"input_cost_per_token": 1.5e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 100000,
|
|
"max_tokens": 100000,
|
|
"mode": "responses",
|
|
"output_cost_per_token": 6e-06,
|
|
"supported_endpoints": [
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/command-r-plus": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"supports_function_calling": true
|
|
},
|
|
"azure/claude-haiku-4-5": {
|
|
"input_cost_per_token": 1e-06,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 64000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5e-06,
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/claude-opus-4-1": {
|
|
"input_cost_per_token": 1.5e-05,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 32000,
|
|
"max_tokens": 32000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.5e-05,
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/claude-sonnet-4-5": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 64000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/computer-use-preview": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 1024,
|
|
"max_tokens": 1024,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.2e-05,
|
|
"supported_endpoints": [
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": false,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/container": {
|
|
"code_interpreter_cost_per_session": 0.03,
|
|
"litellm_provider": "azure",
|
|
"mode": "chat"
|
|
},
|
|
"azure/eu/gpt-4o-2024-08-06": {
|
|
"deprecation_date": "2026-02-27",
|
|
"cache_read_input_token_cost": 1.375e-06,
|
|
"input_cost_per_token": 2.75e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.1e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/eu/gpt-4o-2024-11-20": {
|
|
"deprecation_date": "2026-03-01",
|
|
"cache_creation_input_token_cost": 1.38e-06,
|
|
"input_cost_per_token": 2.75e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.1e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/eu/gpt-4o-mini-2024-07-18": {
|
|
"cache_read_input_token_cost": 8.3e-08,
|
|
"input_cost_per_token": 1.65e-07,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6.6e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/eu/gpt-4o-mini-realtime-preview-2024-12-17": {
|
|
"cache_creation_input_audio_token_cost": 3.3e-07,
|
|
"cache_read_input_token_cost": 3.3e-07,
|
|
"input_cost_per_audio_token": 1.1e-05,
|
|
"input_cost_per_token": 6.6e-07,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_audio_token": 2.2e-05,
|
|
"output_cost_per_token": 2.64e-06,
|
|
"supports_audio_input": true,
|
|
"supports_audio_output": true,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"azure/eu/gpt-4o-realtime-preview-2024-10-01": {
|
|
"cache_creation_input_audio_token_cost": 2.2e-05,
|
|
"cache_read_input_token_cost": 2.75e-06,
|
|
"input_cost_per_audio_token": 0.00011,
|
|
"input_cost_per_token": 5.5e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_audio_token": 0.00022,
|
|
"output_cost_per_token": 2.2e-05,
|
|
"supports_audio_input": true,
|
|
"supports_audio_output": true,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"azure/eu/gpt-4o-realtime-preview-2024-12-17": {
|
|
"cache_read_input_audio_token_cost": 2.5e-06,
|
|
"cache_read_input_token_cost": 2.75e-06,
|
|
"input_cost_per_audio_token": 4.4e-05,
|
|
"input_cost_per_token": 5.5e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_audio_token": 8e-05,
|
|
"output_cost_per_token": 2.2e-05,
|
|
"supported_modalities": [
|
|
"text",
|
|
"audio"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"audio"
|
|
],
|
|
"supports_audio_input": true,
|
|
"supports_audio_output": true,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"azure/eu/gpt-5-2025-08-07": {
|
|
"cache_read_input_token_cost": 1.375e-07,
|
|
"input_cost_per_token": 1.375e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.1e-05,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/eu/gpt-5-mini-2025-08-07": {
|
|
"cache_read_input_token_cost": 2.75e-08,
|
|
"input_cost_per_token": 2.75e-07,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.2e-06,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/eu/gpt-5.1": {
|
|
"cache_read_input_token_cost": 1.4e-07,
|
|
"input_cost_per_token": 1.38e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.1e-05,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/eu/gpt-5.1-chat": {
|
|
"cache_read_input_token_cost": 1.4e-07,
|
|
"input_cost_per_token": 1.38e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.1e-05,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/eu/gpt-5.1-codex": {
|
|
"cache_read_input_token_cost": 1.4e-07,
|
|
"input_cost_per_token": 1.38e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "responses",
|
|
"output_cost_per_token": 1.1e-05,
|
|
"supported_endpoints": [
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": false,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/eu/gpt-5.1-codex-mini": {
|
|
"cache_read_input_token_cost": 2.8e-08,
|
|
"input_cost_per_token": 2.75e-07,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "responses",
|
|
"output_cost_per_token": 2.2e-06,
|
|
"supported_endpoints": [
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": false,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/eu/gpt-5-nano-2025-08-07": {
|
|
"cache_read_input_token_cost": 5.5e-09,
|
|
"input_cost_per_token": 5.5e-08,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4.4e-07,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/eu/o1-2024-12-17": {
|
|
"cache_read_input_token_cost": 8.25e-06,
|
|
"input_cost_per_token": 1.65e-05,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 100000,
|
|
"max_tokens": 100000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6.6e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/eu/o1-mini-2024-09-12": {
|
|
"cache_read_input_token_cost": 6.05e-07,
|
|
"input_cost_per_token": 1.21e-06,
|
|
"input_cost_per_token_batches": 6.05e-07,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 65536,
|
|
"max_tokens": 65536,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4.84e-06,
|
|
"output_cost_per_token_batches": 2.42e-06,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_vision": false
|
|
},
|
|
"azure/eu/o1-preview-2024-09-12": {
|
|
"cache_read_input_token_cost": 8.25e-06,
|
|
"input_cost_per_token": 1.65e-05,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6.6e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_vision": false
|
|
},
|
|
"azure/eu/o3-mini-2025-01-31": {
|
|
"cache_read_input_token_cost": 6.05e-07,
|
|
"input_cost_per_token": 1.21e-06,
|
|
"input_cost_per_token_batches": 6.05e-07,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 100000,
|
|
"max_tokens": 100000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4.84e-06,
|
|
"output_cost_per_token_batches": 2.42e-06,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": false
|
|
},
|
|
"azure/global-standard/gpt-4o-2024-08-06": {
|
|
"cache_read_input_token_cost": 1.25e-06,
|
|
"deprecation_date": "2026-02-27",
|
|
"input_cost_per_token": 2.5e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/global-standard/gpt-4o-2024-11-20": {
|
|
"cache_read_input_token_cost": 1.25e-06,
|
|
"deprecation_date": "2026-03-01",
|
|
"input_cost_per_token": 2.5e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/global-standard/gpt-4o-mini": {
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/global/gpt-4o-2024-08-06": {
|
|
"deprecation_date": "2026-02-27",
|
|
"cache_read_input_token_cost": 1.25e-06,
|
|
"input_cost_per_token": 2.5e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/global/gpt-4o-2024-11-20": {
|
|
"deprecation_date": "2026-03-01",
|
|
"cache_read_input_token_cost": 1.25e-06,
|
|
"input_cost_per_token": 2.5e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/global/gpt-5.1": {
|
|
"cache_read_input_token_cost": 1.25e-07,
|
|
"input_cost_per_token": 1.25e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/global/gpt-5.1-chat": {
|
|
"cache_read_input_token_cost": 1.25e-07,
|
|
"input_cost_per_token": 1.25e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/global/gpt-5.1-codex": {
|
|
"cache_read_input_token_cost": 1.25e-07,
|
|
"input_cost_per_token": 1.25e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "responses",
|
|
"output_cost_per_token": 1e-05,
|
|
"supported_endpoints": [
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": false,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/global/gpt-5.1-codex-mini": {
|
|
"cache_read_input_token_cost": 2.5e-08,
|
|
"input_cost_per_token": 2.5e-07,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "responses",
|
|
"output_cost_per_token": 2e-06,
|
|
"supported_endpoints": [
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": false,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/gpt-3.5-turbo": {
|
|
"input_cost_per_token": 5e-07,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 4097,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-06,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"azure/gpt-3.5-turbo-0125": {
|
|
"deprecation_date": "2025-03-31",
|
|
"input_cost_per_token": 5e-07,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 16384,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-06,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"azure/gpt-3.5-turbo-instruct-0914": {
|
|
"input_cost_per_token": 1.5e-06,
|
|
"litellm_provider": "azure_text",
|
|
"max_input_tokens": 4097,
|
|
"max_tokens": 4097,
|
|
"mode": "completion",
|
|
"output_cost_per_token": 2e-06
|
|
},
|
|
"azure/gpt-35-turbo": {
|
|
"input_cost_per_token": 5e-07,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 4097,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-06,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"azure/gpt-35-turbo-0125": {
|
|
"deprecation_date": "2025-05-31",
|
|
"input_cost_per_token": 5e-07,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 16384,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-06,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"azure/gpt-35-turbo-0301": {
|
|
"deprecation_date": "2025-02-13",
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 4097,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4097,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-06,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"azure/gpt-35-turbo-0613": {
|
|
"deprecation_date": "2025-02-13",
|
|
"input_cost_per_token": 1.5e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 4097,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4097,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-06,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"azure/gpt-35-turbo-1106": {
|
|
"deprecation_date": "2025-03-31",
|
|
"input_cost_per_token": 1e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 16384,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-06,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"azure/gpt-35-turbo-16k": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 16385,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-06,
|
|
"supports_tool_choice": true
|
|
},
|
|
"azure/gpt-35-turbo-16k-0613": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 16385,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-06,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"azure/gpt-35-turbo-instruct": {
|
|
"input_cost_per_token": 1.5e-06,
|
|
"litellm_provider": "azure_text",
|
|
"max_input_tokens": 4097,
|
|
"max_tokens": 4097,
|
|
"mode": "completion",
|
|
"output_cost_per_token": 2e-06
|
|
},
|
|
"azure/gpt-35-turbo-instruct-0914": {
|
|
"input_cost_per_token": 1.5e-06,
|
|
"litellm_provider": "azure_text",
|
|
"max_input_tokens": 4097,
|
|
"max_tokens": 4097,
|
|
"mode": "completion",
|
|
"output_cost_per_token": 2e-06
|
|
},
|
|
"azure/gpt-4": {
|
|
"input_cost_per_token": 3e-05,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-05,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"azure/gpt-4-0125-preview": {
|
|
"input_cost_per_token": 1e-05,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"azure/gpt-4-0613": {
|
|
"input_cost_per_token": 3e-05,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-05,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"azure/gpt-4-1106-preview": {
|
|
"input_cost_per_token": 1e-05,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"azure/gpt-4-32k": {
|
|
"input_cost_per_token": 6e-05,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.00012,
|
|
"supports_tool_choice": true
|
|
},
|
|
"azure/gpt-4-32k-0613": {
|
|
"input_cost_per_token": 6e-05,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.00012,
|
|
"supports_tool_choice": true
|
|
},
|
|
"azure/gpt-4-turbo": {
|
|
"input_cost_per_token": 1e-05,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"azure/gpt-4-turbo-2024-04-09": {
|
|
"input_cost_per_token": 1e-05,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/gpt-4-turbo-vision-preview": {
|
|
"input_cost_per_token": 1e-05,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-05,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/gpt-4.1": {
|
|
"cache_read_input_token_cost": 5e-07,
|
|
"input_cost_per_token": 2e-06,
|
|
"input_cost_per_token_batches": 1e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 1047576,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8e-06,
|
|
"output_cost_per_token_batches": 4e-06,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": false
|
|
},
|
|
"azure/gpt-4.1-2025-04-14": {
|
|
"deprecation_date": "2026-11-04",
|
|
"cache_read_input_token_cost": 5e-07,
|
|
"input_cost_per_token": 2e-06,
|
|
"input_cost_per_token_batches": 1e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 1047576,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8e-06,
|
|
"output_cost_per_token_batches": 4e-06,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": false
|
|
},
|
|
"azure/gpt-4.1-mini": {
|
|
"cache_read_input_token_cost": 1e-07,
|
|
"input_cost_per_token": 4e-07,
|
|
"input_cost_per_token_batches": 2e-07,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 1047576,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.6e-06,
|
|
"output_cost_per_token_batches": 8e-07,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": false
|
|
},
|
|
"azure/gpt-4.1-mini-2025-04-14": {
|
|
"deprecation_date": "2026-11-04",
|
|
"cache_read_input_token_cost": 1e-07,
|
|
"input_cost_per_token": 4e-07,
|
|
"input_cost_per_token_batches": 2e-07,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 1047576,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.6e-06,
|
|
"output_cost_per_token_batches": 8e-07,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": false
|
|
},
|
|
"azure/gpt-4.1-nano": {
|
|
"cache_read_input_token_cost": 2.5e-08,
|
|
"input_cost_per_token": 1e-07,
|
|
"input_cost_per_token_batches": 5e-08,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 1047576,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-07,
|
|
"output_cost_per_token_batches": 2e-07,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/gpt-4.1-nano-2025-04-14": {
|
|
"deprecation_date": "2026-11-04",
|
|
"cache_read_input_token_cost": 2.5e-08,
|
|
"input_cost_per_token": 1e-07,
|
|
"input_cost_per_token_batches": 5e-08,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 1047576,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-07,
|
|
"output_cost_per_token_batches": 2e-07,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/gpt-4.5-preview": {
|
|
"cache_read_input_token_cost": 3.75e-05,
|
|
"input_cost_per_token": 7.5e-05,
|
|
"input_cost_per_token_batches": 3.75e-05,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.00015,
|
|
"output_cost_per_token_batches": 7.5e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/gpt-4o": {
|
|
"cache_read_input_token_cost": 1.25e-06,
|
|
"input_cost_per_token": 2.5e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/gpt-4o-2024-05-13": {
|
|
"input_cost_per_token": 5e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/gpt-4o-2024-08-06": {
|
|
"deprecation_date": "2026-02-27",
|
|
"cache_read_input_token_cost": 1.25e-06,
|
|
"input_cost_per_token": 2.5e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/gpt-4o-2024-11-20": {
|
|
"deprecation_date": "2026-03-01",
|
|
"cache_read_input_token_cost": 1.25e-06,
|
|
"input_cost_per_token": 2.75e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.1e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/gpt-audio-2025-08-28": {
|
|
"input_cost_per_audio_token": 4e-05,
|
|
"input_cost_per_token": 2.5e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_audio_token": 8e-05,
|
|
"output_cost_per_token": 1e-05,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"audio"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"audio"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": false,
|
|
"supports_reasoning": false,
|
|
"supports_response_schema": false,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": false
|
|
},
|
|
"azure/gpt-audio-mini-2025-10-06": {
|
|
"input_cost_per_audio_token": 1e-05,
|
|
"input_cost_per_token": 6e-07,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_audio_token": 2e-05,
|
|
"output_cost_per_token": 2.4e-06,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"audio"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"audio"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": false,
|
|
"supports_reasoning": false,
|
|
"supports_response_schema": false,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": false
|
|
},
|
|
"azure/gpt-4o-audio-preview-2024-12-17": {
|
|
"input_cost_per_audio_token": 4e-05,
|
|
"input_cost_per_token": 2.5e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_audio_token": 8e-05,
|
|
"output_cost_per_token": 1e-05,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"audio"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"audio"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": false,
|
|
"supports_reasoning": false,
|
|
"supports_response_schema": false,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": false
|
|
},
|
|
"azure/gpt-4o-mini": {
|
|
"cache_read_input_token_cost": 7.5e-08,
|
|
"input_cost_per_token": 1.65e-07,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6.6e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/gpt-4o-mini-2024-07-18": {
|
|
"cache_read_input_token_cost": 7.5e-08,
|
|
"input_cost_per_token": 1.65e-07,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6.6e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/gpt-4o-mini-audio-preview-2024-12-17": {
|
|
"input_cost_per_audio_token": 4e-05,
|
|
"input_cost_per_token": 2.5e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_audio_token": 8e-05,
|
|
"output_cost_per_token": 1e-05,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"audio"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"audio"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": false,
|
|
"supports_reasoning": false,
|
|
"supports_response_schema": false,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": false
|
|
},
|
|
"azure/gpt-4o-mini-realtime-preview-2024-12-17": {
|
|
"cache_creation_input_audio_token_cost": 3e-07,
|
|
"cache_read_input_token_cost": 3e-07,
|
|
"input_cost_per_audio_token": 1e-05,
|
|
"input_cost_per_token": 6e-07,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_audio_token": 2e-05,
|
|
"output_cost_per_token": 2.4e-06,
|
|
"supports_audio_input": true,
|
|
"supports_audio_output": true,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"azure/gpt-realtime-2025-08-28": {
|
|
"cache_creation_input_audio_token_cost": 4e-06,
|
|
"cache_read_input_token_cost": 4e-06,
|
|
"input_cost_per_audio_token": 3.2e-05,
|
|
"input_cost_per_image": 5e-06,
|
|
"input_cost_per_token": 4e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_audio_token": 6.4e-05,
|
|
"output_cost_per_token": 1.6e-05,
|
|
"supported_endpoints": [
|
|
"/v1/realtime"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"audio"
|
|
],
|
|
"supports_audio_input": true,
|
|
"supports_audio_output": true,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"azure/gpt-realtime-mini-2025-10-06": {
|
|
"cache_creation_input_audio_token_cost": 3e-07,
|
|
"cache_read_input_token_cost": 6e-08,
|
|
"input_cost_per_audio_token": 1e-05,
|
|
"input_cost_per_image": 8e-07,
|
|
"input_cost_per_token": 6e-07,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_audio_token": 2e-05,
|
|
"output_cost_per_token": 2.4e-06,
|
|
"supported_endpoints": [
|
|
"/v1/realtime"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"audio"
|
|
],
|
|
"supports_audio_input": true,
|
|
"supports_audio_output": true,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"azure/gpt-4o-mini-transcribe": {
|
|
"input_cost_per_audio_token": 3e-06,
|
|
"input_cost_per_token": 1.25e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 16000,
|
|
"max_output_tokens": 2000,
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_token": 5e-06,
|
|
"supported_endpoints": [
|
|
"/v1/audio/transcriptions"
|
|
]
|
|
},
|
|
"azure/gpt-4o-mini-tts": {
|
|
"input_cost_per_token": 2.5e-06,
|
|
"litellm_provider": "azure",
|
|
"mode": "audio_speech",
|
|
"output_cost_per_audio_token": 1.2e-05,
|
|
"output_cost_per_second": 0.00025,
|
|
"output_cost_per_token": 1e-05,
|
|
"supported_endpoints": [
|
|
"/v1/audio/speech"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"audio"
|
|
],
|
|
"supported_output_modalities": [
|
|
"audio"
|
|
]
|
|
},
|
|
"azure/gpt-4o-realtime-preview-2024-10-01": {
|
|
"cache_creation_input_audio_token_cost": 2e-05,
|
|
"cache_read_input_token_cost": 2.5e-06,
|
|
"input_cost_per_audio_token": 0.0001,
|
|
"input_cost_per_token": 5e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_audio_token": 0.0002,
|
|
"output_cost_per_token": 2e-05,
|
|
"supports_audio_input": true,
|
|
"supports_audio_output": true,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"azure/gpt-4o-realtime-preview-2024-12-17": {
|
|
"cache_read_input_token_cost": 2.5e-06,
|
|
"input_cost_per_audio_token": 4e-05,
|
|
"input_cost_per_token": 5e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_audio_token": 8e-05,
|
|
"output_cost_per_token": 2e-05,
|
|
"supported_modalities": [
|
|
"text",
|
|
"audio"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"audio"
|
|
],
|
|
"supports_audio_input": true,
|
|
"supports_audio_output": true,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"azure/gpt-4o-transcribe": {
|
|
"input_cost_per_audio_token": 6e-06,
|
|
"input_cost_per_token": 2.5e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 16000,
|
|
"max_output_tokens": 2000,
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_token": 1e-05,
|
|
"supported_endpoints": [
|
|
"/v1/audio/transcriptions"
|
|
]
|
|
},
|
|
"azure/gpt-4o-transcribe-diarize": {
|
|
"input_cost_per_audio_token": 6e-06,
|
|
"input_cost_per_token": 2.5e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 16000,
|
|
"max_output_tokens": 2000,
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_token": 1e-05,
|
|
"supported_endpoints": [
|
|
"/v1/audio/transcriptions"
|
|
]
|
|
},
|
|
"azure/gpt-5.1-2025-11-13": {
|
|
"cache_read_input_token_cost": 1.25e-07,
|
|
"cache_read_input_token_cost_priority": 2.5e-07,
|
|
"input_cost_per_token": 1.25e-06,
|
|
"input_cost_per_token_priority": 2.5e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"output_cost_per_token_priority": 2e-05,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_service_tier": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/gpt-5.1-chat-2025-11-13": {
|
|
"cache_read_input_token_cost": 1.25e-07,
|
|
"cache_read_input_token_cost_priority": 2.5e-07,
|
|
"input_cost_per_token": 1.25e-06,
|
|
"input_cost_per_token_priority": 2.5e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"output_cost_per_token_priority": 2e-05,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supports_function_calling": false,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": false,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": false,
|
|
"supports_vision": true
|
|
},
|
|
"azure/gpt-5.1-codex-2025-11-13": {
|
|
"cache_read_input_token_cost": 1.25e-07,
|
|
"cache_read_input_token_cost_priority": 2.5e-07,
|
|
"input_cost_per_token": 1.25e-06,
|
|
"input_cost_per_token_priority": 2.5e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "responses",
|
|
"output_cost_per_token": 1e-05,
|
|
"output_cost_per_token_priority": 2e-05,
|
|
"supported_endpoints": [
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": false,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/gpt-5.1-codex-mini-2025-11-13": {
|
|
"cache_read_input_token_cost": 2.5e-08,
|
|
"cache_read_input_token_cost_priority": 4.5e-08,
|
|
"input_cost_per_token": 2.5e-07,
|
|
"input_cost_per_token_priority": 4.5e-07,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "responses",
|
|
"output_cost_per_token": 2e-06,
|
|
"output_cost_per_token_priority": 3.6e-06,
|
|
"supported_endpoints": [
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": false,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/gpt-5": {
|
|
"cache_read_input_token_cost": 1.25e-07,
|
|
"input_cost_per_token": 1.25e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/gpt-5-2025-08-07": {
|
|
"cache_read_input_token_cost": 1.25e-07,
|
|
"input_cost_per_token": 1.25e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/gpt-5-chat": {
|
|
"cache_read_input_token_cost": 1.25e-07,
|
|
"input_cost_per_token": 1.25e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"source": "https://azure.microsoft.com/en-us/blog/gpt-5-in-azure-ai-foundry-the-future-of-ai-apps-and-agents-starts-here/",
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": false,
|
|
"supports_vision": true
|
|
},
|
|
"azure/gpt-5-chat-latest": {
|
|
"cache_read_input_token_cost": 1.25e-07,
|
|
"input_cost_per_token": 1.25e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": false,
|
|
"supports_vision": true
|
|
},
|
|
"azure/gpt-5-codex": {
|
|
"cache_read_input_token_cost": 1.25e-07,
|
|
"input_cost_per_token": 1.25e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "responses",
|
|
"output_cost_per_token": 1e-05,
|
|
"supported_endpoints": [
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/gpt-5-mini": {
|
|
"cache_read_input_token_cost": 2.5e-08,
|
|
"input_cost_per_token": 2.5e-07,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-06,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/gpt-5-mini-2025-08-07": {
|
|
"cache_read_input_token_cost": 2.5e-08,
|
|
"input_cost_per_token": 2.5e-07,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-06,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/gpt-5-nano": {
|
|
"cache_read_input_token_cost": 5e-09,
|
|
"input_cost_per_token": 5e-08,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-07,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/gpt-5-nano-2025-08-07": {
|
|
"cache_read_input_token_cost": 5e-09,
|
|
"input_cost_per_token": 5e-08,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-07,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/gpt-5-pro": {
|
|
"input_cost_per_token": 1.5e-05,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 400000,
|
|
"mode": "responses",
|
|
"output_cost_per_token": 0.00012,
|
|
"source": "https://learn.microsoft.com/en-us/azure/ai-foundry/foundry-models/concepts/models-sold-directly-by-azure?pivots=azure-openai&tabs=global-standard-aoai%2Cstandard-chat-completions%2Cglobal-standard#gpt-5",
|
|
"supported_endpoints": [
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/gpt-5.1": {
|
|
"cache_read_input_token_cost": 1.25e-07,
|
|
"input_cost_per_token": 1.25e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/gpt-5.1-chat": {
|
|
"cache_read_input_token_cost": 1.25e-07,
|
|
"input_cost_per_token": 1.25e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/gpt-5.1-codex": {
|
|
"cache_read_input_token_cost": 1.25e-07,
|
|
"input_cost_per_token": 1.25e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "responses",
|
|
"output_cost_per_token": 1e-05,
|
|
"supported_endpoints": [
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": false,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/gpt-5.1-codex-max": {
|
|
"cache_read_input_token_cost": 1.25e-07,
|
|
"input_cost_per_token": 1.25e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 400000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "responses",
|
|
"output_cost_per_token": 1e-05,
|
|
"supported_endpoints": [
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": false,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/gpt-5.1-codex-mini": {
|
|
"cache_read_input_token_cost": 2.5e-08,
|
|
"input_cost_per_token": 2.5e-07,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "responses",
|
|
"output_cost_per_token": 2e-06,
|
|
"supported_endpoints": [
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": false,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/gpt-image-1": {
|
|
"input_cost_per_pixel": 4.0054321e-08,
|
|
"litellm_provider": "azure",
|
|
"mode": "image_generation",
|
|
"output_cost_per_pixel": 0.0,
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"azure/hd/1024-x-1024/dall-e-3": {
|
|
"input_cost_per_pixel": 7.629e-08,
|
|
"litellm_provider": "azure",
|
|
"mode": "image_generation",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"azure/hd/1024-x-1792/dall-e-3": {
|
|
"input_cost_per_pixel": 6.539e-08,
|
|
"litellm_provider": "azure",
|
|
"mode": "image_generation",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"azure/hd/1792-x-1024/dall-e-3": {
|
|
"input_cost_per_pixel": 6.539e-08,
|
|
"litellm_provider": "azure",
|
|
"mode": "image_generation",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"azure/high/1024-x-1024/gpt-image-1": {
|
|
"input_cost_per_pixel": 1.59263611e-07,
|
|
"litellm_provider": "azure",
|
|
"mode": "image_generation",
|
|
"output_cost_per_pixel": 0.0,
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"azure/high/1024-x-1536/gpt-image-1": {
|
|
"input_cost_per_pixel": 1.58945719e-07,
|
|
"litellm_provider": "azure",
|
|
"mode": "image_generation",
|
|
"output_cost_per_pixel": 0.0,
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"azure/high/1536-x-1024/gpt-image-1": {
|
|
"input_cost_per_pixel": 1.58945719e-07,
|
|
"litellm_provider": "azure",
|
|
"mode": "image_generation",
|
|
"output_cost_per_pixel": 0.0,
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"azure/low/1024-x-1024/gpt-image-1": {
|
|
"input_cost_per_pixel": 1.0490417e-08,
|
|
"litellm_provider": "azure",
|
|
"mode": "image_generation",
|
|
"output_cost_per_pixel": 0.0,
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"azure/low/1024-x-1536/gpt-image-1": {
|
|
"input_cost_per_pixel": 1.0172526e-08,
|
|
"litellm_provider": "azure",
|
|
"mode": "image_generation",
|
|
"output_cost_per_pixel": 0.0,
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"azure/low/1536-x-1024/gpt-image-1": {
|
|
"input_cost_per_pixel": 1.0172526e-08,
|
|
"litellm_provider": "azure",
|
|
"mode": "image_generation",
|
|
"output_cost_per_pixel": 0.0,
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"azure/medium/1024-x-1024/gpt-image-1": {
|
|
"input_cost_per_pixel": 4.0054321e-08,
|
|
"litellm_provider": "azure",
|
|
"mode": "image_generation",
|
|
"output_cost_per_pixel": 0.0,
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"azure/medium/1024-x-1536/gpt-image-1": {
|
|
"input_cost_per_pixel": 4.0054321e-08,
|
|
"litellm_provider": "azure",
|
|
"mode": "image_generation",
|
|
"output_cost_per_pixel": 0.0,
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"azure/medium/1536-x-1024/gpt-image-1": {
|
|
"input_cost_per_pixel": 4.0054321e-08,
|
|
"litellm_provider": "azure",
|
|
"mode": "image_generation",
|
|
"output_cost_per_pixel": 0.0,
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"azure/gpt-image-1-mini": {
|
|
"input_cost_per_pixel": 8.0566406e-09,
|
|
"litellm_provider": "azure",
|
|
"mode": "image_generation",
|
|
"output_cost_per_pixel": 0.0,
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"azure/low/1024-x-1024/gpt-image-1-mini": {
|
|
"input_cost_per_pixel": 2.0751953125e-09,
|
|
"litellm_provider": "azure",
|
|
"mode": "image_generation",
|
|
"output_cost_per_pixel": 0.0,
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"azure/low/1024-x-1536/gpt-image-1-mini": {
|
|
"input_cost_per_pixel": 2.0751953125e-09,
|
|
"litellm_provider": "azure",
|
|
"mode": "image_generation",
|
|
"output_cost_per_pixel": 0.0,
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"azure/low/1536-x-1024/gpt-image-1-mini": {
|
|
"input_cost_per_pixel": 2.0345052083e-09,
|
|
"litellm_provider": "azure",
|
|
"mode": "image_generation",
|
|
"output_cost_per_pixel": 0.0,
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"azure/medium/1024-x-1024/gpt-image-1-mini": {
|
|
"input_cost_per_pixel": 8.056640625e-09,
|
|
"litellm_provider": "azure",
|
|
"mode": "image_generation",
|
|
"output_cost_per_pixel": 0.0,
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"azure/medium/1024-x-1536/gpt-image-1-mini": {
|
|
"input_cost_per_pixel": 8.056640625e-09,
|
|
"litellm_provider": "azure",
|
|
"mode": "image_generation",
|
|
"output_cost_per_pixel": 0.0,
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"azure/medium/1536-x-1024/gpt-image-1-mini": {
|
|
"input_cost_per_pixel": 7.9752604167e-09,
|
|
"litellm_provider": "azure",
|
|
"mode": "image_generation",
|
|
"output_cost_per_pixel": 0.0,
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"azure/high/1024-x-1024/gpt-image-1-mini": {
|
|
"input_cost_per_pixel": 3.173828125e-08,
|
|
"litellm_provider": "azure",
|
|
"mode": "image_generation",
|
|
"output_cost_per_pixel": 0.0,
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"azure/high/1024-x-1536/gpt-image-1-mini": {
|
|
"input_cost_per_pixel": 3.173828125e-08,
|
|
"litellm_provider": "azure",
|
|
"mode": "image_generation",
|
|
"output_cost_per_pixel": 0.0,
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"azure/high/1536-x-1024/gpt-image-1-mini": {
|
|
"input_cost_per_pixel": 3.1575520833e-08,
|
|
"litellm_provider": "azure",
|
|
"mode": "image_generation",
|
|
"output_cost_per_pixel": 0.0,
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"azure/mistral-large-2402": {
|
|
"input_cost_per_token": 8e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 32000,
|
|
"max_tokens": 32000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.4e-05,
|
|
"supports_function_calling": true
|
|
},
|
|
"azure/mistral-large-latest": {
|
|
"input_cost_per_token": 8e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 32000,
|
|
"max_tokens": 32000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.4e-05,
|
|
"supports_function_calling": true
|
|
},
|
|
"azure/o1": {
|
|
"cache_read_input_token_cost": 7.5e-06,
|
|
"input_cost_per_token": 1.5e-05,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 100000,
|
|
"max_tokens": 100000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/o1-2024-12-17": {
|
|
"cache_read_input_token_cost": 7.5e-06,
|
|
"input_cost_per_token": 1.5e-05,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 100000,
|
|
"max_tokens": 100000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/o1-mini": {
|
|
"cache_read_input_token_cost": 6.05e-07,
|
|
"input_cost_per_token": 1.21e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 65536,
|
|
"max_tokens": 65536,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4.84e-06,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_vision": false
|
|
},
|
|
"azure/o1-mini-2024-09-12": {
|
|
"cache_read_input_token_cost": 5.5e-07,
|
|
"input_cost_per_token": 1.1e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 65536,
|
|
"max_tokens": 65536,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4.4e-06,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_vision": false
|
|
},
|
|
"azure/o1-preview": {
|
|
"cache_read_input_token_cost": 7.5e-06,
|
|
"input_cost_per_token": 1.5e-05,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_vision": false
|
|
},
|
|
"azure/o1-preview-2024-09-12": {
|
|
"cache_read_input_token_cost": 7.5e-06,
|
|
"input_cost_per_token": 1.5e-05,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_vision": false
|
|
},
|
|
"azure/o3": {
|
|
"cache_read_input_token_cost": 5e-07,
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 100000,
|
|
"max_tokens": 100000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8e-06,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": false,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/o3-2025-04-16": {
|
|
"deprecation_date": "2026-04-16",
|
|
"cache_read_input_token_cost": 5e-07,
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 100000,
|
|
"max_tokens": 100000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8e-06,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": false,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/o3-deep-research": {
|
|
"cache_read_input_token_cost": 2.5e-06,
|
|
"input_cost_per_token": 1e-05,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 100000,
|
|
"max_tokens": 100000,
|
|
"mode": "responses",
|
|
"output_cost_per_token": 4e-05,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true
|
|
},
|
|
"azure/o3-mini": {
|
|
"cache_read_input_token_cost": 5.5e-07,
|
|
"input_cost_per_token": 1.1e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 100000,
|
|
"max_tokens": 100000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4.4e-06,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": false
|
|
},
|
|
"azure/o3-mini-2025-01-31": {
|
|
"cache_read_input_token_cost": 5.5e-07,
|
|
"input_cost_per_token": 1.1e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 100000,
|
|
"max_tokens": 100000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4.4e-06,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": false
|
|
},
|
|
"azure/o3-pro": {
|
|
"input_cost_per_token": 2e-05,
|
|
"input_cost_per_token_batches": 1e-05,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 100000,
|
|
"max_tokens": 100000,
|
|
"mode": "responses",
|
|
"output_cost_per_token": 8e-05,
|
|
"output_cost_per_token_batches": 4e-05,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": false,
|
|
"supports_prompt_caching": false,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/o3-pro-2025-06-10": {
|
|
"input_cost_per_token": 2e-05,
|
|
"input_cost_per_token_batches": 1e-05,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 100000,
|
|
"max_tokens": 100000,
|
|
"mode": "responses",
|
|
"output_cost_per_token": 8e-05,
|
|
"output_cost_per_token_batches": 4e-05,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": false,
|
|
"supports_prompt_caching": false,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/o4-mini": {
|
|
"cache_read_input_token_cost": 2.75e-07,
|
|
"input_cost_per_token": 1.1e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 100000,
|
|
"max_tokens": 100000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4.4e-06,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": false,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/o4-mini-2025-04-16": {
|
|
"cache_read_input_token_cost": 2.75e-07,
|
|
"input_cost_per_token": 1.1e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 100000,
|
|
"max_tokens": 100000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4.4e-06,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": false,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/standard/1024-x-1024/dall-e-2": {
|
|
"input_cost_per_pixel": 0.0,
|
|
"litellm_provider": "azure",
|
|
"mode": "image_generation",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"azure/standard/1024-x-1024/dall-e-3": {
|
|
"input_cost_per_pixel": 3.81469e-08,
|
|
"litellm_provider": "azure",
|
|
"mode": "image_generation",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"azure/standard/1024-x-1792/dall-e-3": {
|
|
"input_cost_per_pixel": 4.359e-08,
|
|
"litellm_provider": "azure",
|
|
"mode": "image_generation",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"azure/standard/1792-x-1024/dall-e-3": {
|
|
"input_cost_per_pixel": 4.359e-08,
|
|
"litellm_provider": "azure",
|
|
"mode": "image_generation",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"azure/text-embedding-3-large": {
|
|
"input_cost_per_token": 1.3e-07,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"azure/text-embedding-3-small": {
|
|
"deprecation_date": "2026-04-30",
|
|
"input_cost_per_token": 2e-08,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"azure/text-embedding-ada-002": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"azure/speech/azure-tts": {
|
|
"input_cost_per_character": 15e-06,
|
|
"litellm_provider": "azure",
|
|
"mode": "audio_speech",
|
|
"source": "https://azure.microsoft.com/en-us/pricing/calculator/"
|
|
},
|
|
"azure/speech/azure-tts-hd": {
|
|
"input_cost_per_character": 30e-06,
|
|
"litellm_provider": "azure",
|
|
"mode": "audio_speech",
|
|
"source": "https://azure.microsoft.com/en-us/pricing/calculator/"
|
|
},
|
|
"azure/tts-1": {
|
|
"input_cost_per_character": 1.5e-05,
|
|
"litellm_provider": "azure",
|
|
"mode": "audio_speech"
|
|
},
|
|
"azure/tts-1-hd": {
|
|
"input_cost_per_character": 3e-05,
|
|
"litellm_provider": "azure",
|
|
"mode": "audio_speech"
|
|
},
|
|
"azure/us/gpt-4.1-2025-04-14": {
|
|
"deprecation_date": "2026-11-04",
|
|
"cache_read_input_token_cost": 5.5e-07,
|
|
"input_cost_per_token": 2.2e-06,
|
|
"input_cost_per_token_batches": 1.1e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 1047576,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8.8e-06,
|
|
"output_cost_per_token_batches": 4.4e-06,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": false
|
|
},
|
|
"azure/us/gpt-4.1-mini-2025-04-14": {
|
|
"deprecation_date": "2026-11-04",
|
|
"cache_read_input_token_cost": 1.1e-07,
|
|
"input_cost_per_token": 4.4e-07,
|
|
"input_cost_per_token_batches": 2.2e-07,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 1047576,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.76e-06,
|
|
"output_cost_per_token_batches": 8.8e-07,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": false
|
|
},
|
|
"azure/us/gpt-4.1-nano-2025-04-14": {
|
|
"deprecation_date": "2026-11-04",
|
|
"cache_read_input_token_cost": 2.5e-08,
|
|
"input_cost_per_token": 1.1e-07,
|
|
"input_cost_per_token_batches": 6e-08,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 1047576,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4.4e-07,
|
|
"output_cost_per_token_batches": 2.2e-07,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/us/gpt-4o-2024-08-06": {
|
|
"deprecation_date": "2026-02-27",
|
|
"cache_read_input_token_cost": 1.375e-06,
|
|
"input_cost_per_token": 2.75e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.1e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/us/gpt-4o-2024-11-20": {
|
|
"deprecation_date": "2026-03-01",
|
|
"cache_creation_input_token_cost": 1.38e-06,
|
|
"input_cost_per_token": 2.75e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.1e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/us/gpt-4o-mini-2024-07-18": {
|
|
"cache_read_input_token_cost": 8.3e-08,
|
|
"input_cost_per_token": 1.65e-07,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6.6e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/us/gpt-4o-mini-realtime-preview-2024-12-17": {
|
|
"cache_creation_input_audio_token_cost": 3.3e-07,
|
|
"cache_read_input_token_cost": 3.3e-07,
|
|
"input_cost_per_audio_token": 1.1e-05,
|
|
"input_cost_per_token": 6.6e-07,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_audio_token": 2.2e-05,
|
|
"output_cost_per_token": 2.64e-06,
|
|
"supports_audio_input": true,
|
|
"supports_audio_output": true,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"azure/us/gpt-4o-realtime-preview-2024-10-01": {
|
|
"cache_creation_input_audio_token_cost": 2.2e-05,
|
|
"cache_read_input_token_cost": 2.75e-06,
|
|
"input_cost_per_audio_token": 0.00011,
|
|
"input_cost_per_token": 5.5e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_audio_token": 0.00022,
|
|
"output_cost_per_token": 2.2e-05,
|
|
"supports_audio_input": true,
|
|
"supports_audio_output": true,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"azure/us/gpt-4o-realtime-preview-2024-12-17": {
|
|
"cache_read_input_audio_token_cost": 2.5e-06,
|
|
"cache_read_input_token_cost": 2.75e-06,
|
|
"input_cost_per_audio_token": 4.4e-05,
|
|
"input_cost_per_token": 5.5e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_audio_token": 8e-05,
|
|
"output_cost_per_token": 2.2e-05,
|
|
"supported_modalities": [
|
|
"text",
|
|
"audio"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"audio"
|
|
],
|
|
"supports_audio_input": true,
|
|
"supports_audio_output": true,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"azure/us/gpt-5-2025-08-07": {
|
|
"cache_read_input_token_cost": 1.375e-07,
|
|
"input_cost_per_token": 1.375e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.1e-05,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/us/gpt-5-mini-2025-08-07": {
|
|
"cache_read_input_token_cost": 2.75e-08,
|
|
"input_cost_per_token": 2.75e-07,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.2e-06,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/us/gpt-5-nano-2025-08-07": {
|
|
"cache_read_input_token_cost": 5.5e-09,
|
|
"input_cost_per_token": 5.5e-08,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4.4e-07,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/us/gpt-5.1": {
|
|
"cache_read_input_token_cost": 1.4e-07,
|
|
"input_cost_per_token": 1.38e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.1e-05,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/us/gpt-5.1-chat": {
|
|
"cache_read_input_token_cost": 1.4e-07,
|
|
"input_cost_per_token": 1.38e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.1e-05,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/us/gpt-5.1-codex": {
|
|
"cache_read_input_token_cost": 1.4e-07,
|
|
"input_cost_per_token": 1.38e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "responses",
|
|
"output_cost_per_token": 1.1e-05,
|
|
"supported_endpoints": [
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": false,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/us/gpt-5.1-codex-mini": {
|
|
"cache_read_input_token_cost": 2.8e-08,
|
|
"input_cost_per_token": 2.75e-07,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "responses",
|
|
"output_cost_per_token": 2.2e-06,
|
|
"supported_endpoints": [
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": false,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/us/o1-2024-12-17": {
|
|
"cache_read_input_token_cost": 8.25e-06,
|
|
"input_cost_per_token": 1.65e-05,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 100000,
|
|
"max_tokens": 100000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6.6e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/us/o1-mini-2024-09-12": {
|
|
"cache_read_input_token_cost": 6.05e-07,
|
|
"input_cost_per_token": 1.21e-06,
|
|
"input_cost_per_token_batches": 6.05e-07,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 65536,
|
|
"max_tokens": 65536,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4.84e-06,
|
|
"output_cost_per_token_batches": 2.42e-06,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_vision": false
|
|
},
|
|
"azure/us/o1-preview-2024-09-12": {
|
|
"cache_read_input_token_cost": 8.25e-06,
|
|
"input_cost_per_token": 1.65e-05,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6.6e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_vision": false
|
|
},
|
|
"azure/us/o3-2025-04-16": {
|
|
"deprecation_date": "2026-04-16",
|
|
"cache_read_input_token_cost": 5.5e-07,
|
|
"input_cost_per_token": 2.2e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 100000,
|
|
"max_tokens": 100000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8.8e-06,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": false,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/us/o3-mini-2025-01-31": {
|
|
"cache_read_input_token_cost": 6.05e-07,
|
|
"input_cost_per_token": 1.21e-06,
|
|
"input_cost_per_token_batches": 6.05e-07,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 100000,
|
|
"max_tokens": 100000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4.84e-06,
|
|
"output_cost_per_token_batches": 2.42e-06,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": false
|
|
},
|
|
"azure/us/o4-mini-2025-04-16": {
|
|
"cache_read_input_token_cost": 3.1e-07,
|
|
"input_cost_per_token": 1.21e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 100000,
|
|
"max_tokens": 100000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4.84e-06,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": false,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure/whisper-1": {
|
|
"input_cost_per_second": 0.0001,
|
|
"litellm_provider": "azure",
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_second": 0.0001
|
|
},
|
|
"azure_ai/Cohere-embed-v3-english": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 512,
|
|
"max_tokens": 512,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0,
|
|
"output_vector_size": 1024,
|
|
"source": "https://azuremarketplace.microsoft.com/en-us/marketplace/apps/cohere.cohere-embed-v3-english-offer?tab=PlansAndPrice",
|
|
"supports_embedding_image_input": true
|
|
},
|
|
"azure_ai/Cohere-embed-v3-multilingual": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 512,
|
|
"max_tokens": 512,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0,
|
|
"output_vector_size": 1024,
|
|
"source": "https://azuremarketplace.microsoft.com/en-us/marketplace/apps/cohere.cohere-embed-v3-english-offer?tab=PlansAndPrice",
|
|
"supports_embedding_image_input": true
|
|
},
|
|
"azure_ai/FLUX-1.1-pro": {
|
|
"litellm_provider": "azure_ai",
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.04,
|
|
"source": "https://techcommunity.microsoft.com/blog/azure-ai-foundry-blog/black-forest-labs-flux-1-kontext-pro-and-flux1-1-pro-now-available-in-azure-ai-f/4434659",
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"azure_ai/FLUX.1-Kontext-pro": {
|
|
"litellm_provider": "azure_ai",
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.04,
|
|
"source": "https://azuremarketplace.microsoft.com/pt-br/marketplace/apps/cohere.cohere-embed-4-offer?tab=PlansAndPrice",
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"azure_ai/Llama-3.2-11B-Vision-Instruct": {
|
|
"input_cost_per_token": 3.7e-07,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 2048,
|
|
"max_tokens": 2048,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3.7e-07,
|
|
"source": "https://azuremarketplace.microsoft.com/en/marketplace/apps/metagenai.meta-llama-3-2-11b-vision-instruct-offer?tab=Overview",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure_ai/Llama-3.2-90B-Vision-Instruct": {
|
|
"input_cost_per_token": 2.04e-06,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 2048,
|
|
"max_tokens": 2048,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.04e-06,
|
|
"source": "https://azuremarketplace.microsoft.com/en/marketplace/apps/metagenai.meta-llama-3-2-90b-vision-instruct-offer?tab=Overview",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure_ai/Llama-3.3-70B-Instruct": {
|
|
"input_cost_per_token": 7.1e-07,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 2048,
|
|
"max_tokens": 2048,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.1e-07,
|
|
"source": "https://azuremarketplace.microsoft.com/en/marketplace/apps/metagenai.llama-3-3-70b-instruct-offer?tab=Overview",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"azure_ai/Llama-4-Maverick-17B-128E-Instruct-FP8": {
|
|
"input_cost_per_token": 1.41e-06,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 1000000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3.5e-07,
|
|
"source": "https://azure.microsoft.com/en-us/blog/introducing-the-llama-4-herd-in-azure-ai-foundry-and-azure-databricks/",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure_ai/Llama-4-Scout-17B-16E-Instruct": {
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 10000000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.8e-07,
|
|
"source": "https://azure.microsoft.com/en-us/blog/introducing-the-llama-4-herd-in-azure-ai-foundry-and-azure-databricks/",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure_ai/Meta-Llama-3-70B-Instruct": {
|
|
"input_cost_per_token": 1.1e-06,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 2048,
|
|
"max_tokens": 2048,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3.7e-07,
|
|
"supports_tool_choice": true
|
|
},
|
|
"azure_ai/Meta-Llama-3.1-405B-Instruct": {
|
|
"input_cost_per_token": 5.33e-06,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 2048,
|
|
"max_tokens": 2048,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.6e-05,
|
|
"source": "https://azuremarketplace.microsoft.com/en-us/marketplace/apps/metagenai.meta-llama-3-1-405b-instruct-offer?tab=PlansAndPrice",
|
|
"supports_tool_choice": true
|
|
},
|
|
"azure_ai/Meta-Llama-3.1-70B-Instruct": {
|
|
"input_cost_per_token": 2.68e-06,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 2048,
|
|
"max_tokens": 2048,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3.54e-06,
|
|
"source": "https://azuremarketplace.microsoft.com/en-us/marketplace/apps/metagenai.meta-llama-3-1-70b-instruct-offer?tab=PlansAndPrice",
|
|
"supports_tool_choice": true
|
|
},
|
|
"azure_ai/Meta-Llama-3.1-8B-Instruct": {
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 2048,
|
|
"max_tokens": 2048,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6.1e-07,
|
|
"source": "https://azuremarketplace.microsoft.com/en-us/marketplace/apps/metagenai.meta-llama-3-1-8b-instruct-offer?tab=PlansAndPrice",
|
|
"supports_tool_choice": true
|
|
},
|
|
"azure_ai/Phi-3-medium-128k-instruct": {
|
|
"input_cost_per_token": 1.7e-07,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6.8e-07,
|
|
"source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/",
|
|
"supports_tool_choice": true,
|
|
"supports_vision": false
|
|
},
|
|
"azure_ai/Phi-3-medium-4k-instruct": {
|
|
"input_cost_per_token": 1.7e-07,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6.8e-07,
|
|
"source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/",
|
|
"supports_tool_choice": true,
|
|
"supports_vision": false
|
|
},
|
|
"azure_ai/Phi-3-mini-128k-instruct": {
|
|
"input_cost_per_token": 1.3e-07,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5.2e-07,
|
|
"source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/",
|
|
"supports_tool_choice": true,
|
|
"supports_vision": false
|
|
},
|
|
"azure_ai/Phi-3-mini-4k-instruct": {
|
|
"input_cost_per_token": 1.3e-07,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5.2e-07,
|
|
"source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/",
|
|
"supports_tool_choice": true,
|
|
"supports_vision": false
|
|
},
|
|
"azure_ai/Phi-3-small-128k-instruct": {
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-07,
|
|
"source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/",
|
|
"supports_tool_choice": true,
|
|
"supports_vision": false
|
|
},
|
|
"azure_ai/Phi-3-small-8k-instruct": {
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-07,
|
|
"source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/",
|
|
"supports_tool_choice": true,
|
|
"supports_vision": false
|
|
},
|
|
"azure_ai/Phi-3.5-MoE-instruct": {
|
|
"input_cost_per_token": 1.6e-07,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6.4e-07,
|
|
"source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/",
|
|
"supports_tool_choice": true,
|
|
"supports_vision": false
|
|
},
|
|
"azure_ai/Phi-3.5-mini-instruct": {
|
|
"input_cost_per_token": 1.3e-07,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5.2e-07,
|
|
"source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/",
|
|
"supports_tool_choice": true,
|
|
"supports_vision": false
|
|
},
|
|
"azure_ai/Phi-3.5-vision-instruct": {
|
|
"input_cost_per_token": 1.3e-07,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5.2e-07,
|
|
"source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/",
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure_ai/Phi-4": {
|
|
"input_cost_per_token": 1.25e-07,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 16384,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5e-07,
|
|
"source": "https://techcommunity.microsoft.com/blog/machinelearningblog/affordable-innovation-unveiling-the-pricing-of-phi-3-slms-on-models-as-a-service/4156495",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": false
|
|
},
|
|
"azure_ai/Phi-4-mini-instruct": {
|
|
"input_cost_per_token": 7.5e-08,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-07,
|
|
"source": "https://techcommunity.microsoft.com/blog/Azure-AI-Services-blog/announcing-new-phi-pricing-empowering-your-business-with-small-language-models/4395112",
|
|
"supports_function_calling": true
|
|
},
|
|
"azure_ai/Phi-4-multimodal-instruct": {
|
|
"input_cost_per_audio_token": 4e-06,
|
|
"input_cost_per_token": 8e-08,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3.2e-07,
|
|
"source": "https://techcommunity.microsoft.com/blog/Azure-AI-Services-blog/announcing-new-phi-pricing-empowering-your-business-with-small-language-models/4395112",
|
|
"supports_audio_input": true,
|
|
"supports_function_calling": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure_ai/Phi-4-mini-reasoning": {
|
|
"input_cost_per_token": 8e-08,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3.2e-07,
|
|
"source": "https://azure.microsoft.com/en-us/pricing/details/ai-foundry-models/microsoft/",
|
|
"supports_function_calling": true
|
|
},
|
|
"azure_ai/Phi-4-reasoning": {
|
|
"input_cost_per_token": 1.25e-07,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5e-07,
|
|
"source": "https://azure.microsoft.com/en-us/pricing/details/ai-foundry-models/microsoft/",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_reasoning": true
|
|
},
|
|
"azure_ai/mistral-document-ai-2505": {
|
|
"litellm_provider": "azure_ai",
|
|
"ocr_cost_per_page": 3e-3,
|
|
"mode": "ocr",
|
|
"supported_endpoints": [
|
|
"/v1/ocr"
|
|
],
|
|
"source": "https://devblogs.microsoft.com/foundry/whats-new-in-azure-ai-foundry-august-2025/#mistral-document-ai-(ocr)-%E2%80%94-serverless-in-foundry"
|
|
},
|
|
"azure_ai/doc-intelligence/prebuilt-read": {
|
|
"litellm_provider": "azure_ai",
|
|
"ocr_cost_per_page": 1.5e-3,
|
|
"mode": "ocr",
|
|
"supported_endpoints": [
|
|
"/v1/ocr"
|
|
],
|
|
"source": "https://azure.microsoft.com/en-us/pricing/details/ai-document-intelligence/"
|
|
},
|
|
"azure_ai/doc-intelligence/prebuilt-layout": {
|
|
"litellm_provider": "azure_ai",
|
|
"ocr_cost_per_page": 1e-2,
|
|
"mode": "ocr",
|
|
"supported_endpoints": [
|
|
"/v1/ocr"
|
|
],
|
|
"source": "https://azure.microsoft.com/en-us/pricing/details/ai-document-intelligence/"
|
|
},
|
|
"azure_ai/doc-intelligence/prebuilt-document": {
|
|
"litellm_provider": "azure_ai",
|
|
"ocr_cost_per_page": 1e-2,
|
|
"mode": "ocr",
|
|
"supported_endpoints": [
|
|
"/v1/ocr"
|
|
],
|
|
"source": "https://azure.microsoft.com/en-us/pricing/details/ai-document-intelligence/"
|
|
},
|
|
"azure_ai/MAI-DS-R1": {
|
|
"input_cost_per_token": 1.35e-06,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5.4e-06,
|
|
"source": "https://azure.microsoft.com/en-us/pricing/details/ai-foundry-models/microsoft/",
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"azure_ai/cohere-rerank-v3-english": {
|
|
"input_cost_per_query": 0.002,
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_query_tokens": 2048,
|
|
"max_tokens": 4096,
|
|
"mode": "rerank",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"azure_ai/cohere-rerank-v3-multilingual": {
|
|
"input_cost_per_query": 0.002,
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_query_tokens": 2048,
|
|
"max_tokens": 4096,
|
|
"mode": "rerank",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"azure_ai/cohere-rerank-v3.5": {
|
|
"input_cost_per_query": 0.002,
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_query_tokens": 2048,
|
|
"max_tokens": 4096,
|
|
"mode": "rerank",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"azure_ai/deepseek-r1": {
|
|
"input_cost_per_token": 1.35e-06,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5.4e-06,
|
|
"source": "https://techcommunity.microsoft.com/blog/machinelearningblog/deepseek-r1-improved-performance-higher-limits-and-transparent-pricing/4386367",
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"azure_ai/deepseek-v3": {
|
|
"input_cost_per_token": 1.14e-06,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4.56e-06,
|
|
"source": "https://techcommunity.microsoft.com/blog/machinelearningblog/announcing-deepseek-v3-on-azure-ai-foundry-and-github/4390438",
|
|
"supports_tool_choice": true
|
|
},
|
|
"azure_ai/deepseek-v3-0324": {
|
|
"input_cost_per_token": 1.14e-06,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4.56e-06,
|
|
"source": "https://techcommunity.microsoft.com/blog/machinelearningblog/announcing-deepseek-v3-on-azure-ai-foundry-and-github/4390438",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"azure_ai/embed-v-4-0": {
|
|
"input_cost_per_token": 1.2e-07,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0,
|
|
"output_vector_size": 3072,
|
|
"source": "https://azuremarketplace.microsoft.com/pt-br/marketplace/apps/cohere.cohere-embed-4-offer?tab=PlansAndPrice",
|
|
"supported_endpoints": [
|
|
"/v1/embeddings"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supports_embedding_image_input": true
|
|
},
|
|
"azure_ai/global/grok-3": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"source": "https://devblogs.microsoft.com/foundry/announcing-grok-3-and-grok-3-mini-on-azure-ai-foundry/",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": false,
|
|
"supports_tool_choice": true,
|
|
"supports_web_search": true
|
|
},
|
|
"azure_ai/global/grok-3-mini": {
|
|
"input_cost_per_token": 2.5e-07,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.27e-06,
|
|
"source": "https://devblogs.microsoft.com/foundry/announcing-grok-3-and-grok-3-mini-on-azure-ai-foundry/",
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": false,
|
|
"supports_tool_choice": true,
|
|
"supports_web_search": true
|
|
},
|
|
"azure_ai/grok-3": {
|
|
"input_cost_per_token": 3.3e-06,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.65e-05,
|
|
"source": "https://devblogs.microsoft.com/foundry/announcing-grok-3-and-grok-3-mini-on-azure-ai-foundry/",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": false,
|
|
"supports_tool_choice": true,
|
|
"supports_web_search": true
|
|
},
|
|
"azure_ai/grok-3-mini": {
|
|
"input_cost_per_token": 2.75e-07,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.38e-06,
|
|
"source": "https://devblogs.microsoft.com/foundry/announcing-grok-3-and-grok-3-mini-on-azure-ai-foundry/",
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": false,
|
|
"supports_tool_choice": true,
|
|
"supports_web_search": true
|
|
},
|
|
"azure_ai/grok-4": {
|
|
"input_cost_per_token": 5.5e-06,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.75e-05,
|
|
"source": "https://azure.microsoft.com/en-us/blog/grok-4-is-now-available-in-azure-ai-foundry-unlock-frontier-intelligence-and-business-ready-capabilities/",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_web_search": true
|
|
},
|
|
"azure_ai/grok-4-fast-non-reasoning": {
|
|
"input_cost_per_token": 0.43e-06,
|
|
"output_cost_per_token": 1.73e-06,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_web_search": true
|
|
},
|
|
"azure_ai/grok-4-fast-reasoning": {
|
|
"input_cost_per_token": 0.43e-06,
|
|
"output_cost_per_token": 1.73e-06,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"source": "https://techcommunity.microsoft.com/blog/azure-ai-foundry-blog/announcing-the-grok-4-fast-models-from-xai-now-available-in-azure-ai-foundry/4456701",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_web_search": true
|
|
},
|
|
"azure_ai/grok-code-fast-1": {
|
|
"input_cost_per_token": 3.5e-06,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.75e-05,
|
|
"source": "https://azure.microsoft.com/en-us/blog/grok-4-is-now-available-in-azure-ai-foundry-unlock-frontier-intelligence-and-business-ready-capabilities/",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_web_search": true
|
|
},
|
|
"azure_ai/jais-30b-chat": {
|
|
"input_cost_per_token": 0.0032,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.00971,
|
|
"source": "https://azure.microsoft.com/en-us/products/ai-services/ai-foundry/models/jais-30b-chat"
|
|
},
|
|
"azure_ai/jamba-instruct": {
|
|
"input_cost_per_token": 5e-07,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 70000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7e-07,
|
|
"supports_tool_choice": true
|
|
},
|
|
"azure_ai/ministral-3b": {
|
|
"input_cost_per_token": 4e-08,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-08,
|
|
"source": "https://azuremarketplace.microsoft.com/en/marketplace/apps/000-000.ministral-3b-2410-offer?tab=Overview",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"azure_ai/mistral-large": {
|
|
"input_cost_per_token": 4e-06,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.2e-05,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"azure_ai/mistral-large-2407": {
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-06,
|
|
"source": "https://azuremarketplace.microsoft.com/en/marketplace/apps/000-000.mistral-ai-large-2407-offer?tab=Overview",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"azure_ai/mistral-large-latest": {
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-06,
|
|
"source": "https://azuremarketplace.microsoft.com/en/marketplace/apps/000-000.mistral-ai-large-2407-offer?tab=Overview",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"azure_ai/mistral-large-3": {
|
|
"input_cost_per_token": 5e-07,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 256000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-06,
|
|
"source": "https://azure.microsoft.com/en-us/blog/introducing-mistral-large-3-in-microsoft-foundry-open-capable-and-ready-for-production-workloads/",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"azure_ai/mistral-medium-2505": {
|
|
"input_cost_per_token": 4e-07,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-06,
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"azure_ai/mistral-nemo": {
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-07,
|
|
"source": "https://azuremarketplace.microsoft.com/en/marketplace/apps/000-000.mistral-nemo-12b-2407?tab=PlansAndPrice",
|
|
"supports_function_calling": true
|
|
},
|
|
"azure_ai/mistral-small": {
|
|
"input_cost_per_token": 1e-06,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-06,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"azure_ai/mistral-small-2503": {
|
|
"input_cost_per_token": 1e-06,
|
|
"litellm_provider": "azure_ai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-06,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"babbage-002": {
|
|
"input_cost_per_token": 4e-07,
|
|
"litellm_provider": "text-completion-openai",
|
|
"max_input_tokens": 16384,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 16384,
|
|
"mode": "completion",
|
|
"output_cost_per_token": 4e-07
|
|
},
|
|
"bedrock/*/1-month-commitment/cohere.command-light-text-v14": {
|
|
"input_cost_per_second": 0.001902,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_second": 0.001902,
|
|
"supports_tool_choice": true
|
|
},
|
|
"bedrock/*/1-month-commitment/cohere.command-text-v14": {
|
|
"input_cost_per_second": 0.011,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_second": 0.011,
|
|
"supports_tool_choice": true
|
|
},
|
|
"bedrock/*/6-month-commitment/cohere.command-light-text-v14": {
|
|
"input_cost_per_second": 0.0011416,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_second": 0.0011416,
|
|
"supports_tool_choice": true
|
|
},
|
|
"bedrock/*/6-month-commitment/cohere.command-text-v14": {
|
|
"input_cost_per_second": 0.0066027,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_second": 0.0066027,
|
|
"supports_tool_choice": true
|
|
},
|
|
"bedrock/ap-northeast-1/1-month-commitment/anthropic.claude-instant-v1": {
|
|
"input_cost_per_second": 0.01475,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 100000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_second": 0.01475,
|
|
"supports_tool_choice": true
|
|
},
|
|
"bedrock/ap-northeast-1/1-month-commitment/anthropic.claude-v1": {
|
|
"input_cost_per_second": 0.0455,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 100000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_second": 0.0455
|
|
},
|
|
"bedrock/ap-northeast-1/1-month-commitment/anthropic.claude-v2:1": {
|
|
"input_cost_per_second": 0.0455,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 100000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_second": 0.0455,
|
|
"supports_tool_choice": true
|
|
},
|
|
"bedrock/ap-northeast-1/6-month-commitment/anthropic.claude-instant-v1": {
|
|
"input_cost_per_second": 0.008194,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 100000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_second": 0.008194,
|
|
"supports_tool_choice": true
|
|
},
|
|
"bedrock/ap-northeast-1/6-month-commitment/anthropic.claude-v1": {
|
|
"input_cost_per_second": 0.02527,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 100000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_second": 0.02527
|
|
},
|
|
"bedrock/ap-northeast-1/6-month-commitment/anthropic.claude-v2:1": {
|
|
"input_cost_per_second": 0.02527,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 100000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_second": 0.02527,
|
|
"supports_tool_choice": true
|
|
},
|
|
"bedrock/ap-northeast-1/anthropic.claude-instant-v1": {
|
|
"input_cost_per_token": 2.23e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 100000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.55e-06,
|
|
"supports_tool_choice": true
|
|
},
|
|
"bedrock/ap-northeast-1/anthropic.claude-v1": {
|
|
"input_cost_per_token": 8e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 100000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.4e-05,
|
|
"supports_tool_choice": true
|
|
},
|
|
"bedrock/ap-northeast-1/anthropic.claude-v2:1": {
|
|
"input_cost_per_token": 8e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 100000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.4e-05,
|
|
"supports_tool_choice": true
|
|
},
|
|
"bedrock/ap-south-1/meta.llama3-70b-instruct-v1:0": {
|
|
"input_cost_per_token": 3.18e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4.2e-06
|
|
},
|
|
"bedrock/ap-south-1/meta.llama3-8b-instruct-v1:0": {
|
|
"input_cost_per_token": 3.6e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.2e-07
|
|
},
|
|
"bedrock/ca-central-1/meta.llama3-70b-instruct-v1:0": {
|
|
"input_cost_per_token": 3.05e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4.03e-06
|
|
},
|
|
"bedrock/ca-central-1/meta.llama3-8b-instruct-v1:0": {
|
|
"input_cost_per_token": 3.5e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6.9e-07
|
|
},
|
|
"bedrock/eu-central-1/1-month-commitment/anthropic.claude-instant-v1": {
|
|
"input_cost_per_second": 0.01635,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 100000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_second": 0.01635,
|
|
"supports_tool_choice": true
|
|
},
|
|
"bedrock/eu-central-1/1-month-commitment/anthropic.claude-v1": {
|
|
"input_cost_per_second": 0.0415,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 100000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_second": 0.0415
|
|
},
|
|
"bedrock/eu-central-1/1-month-commitment/anthropic.claude-v2:1": {
|
|
"input_cost_per_second": 0.0415,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 100000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_second": 0.0415,
|
|
"supports_tool_choice": true
|
|
},
|
|
"bedrock/eu-central-1/6-month-commitment/anthropic.claude-instant-v1": {
|
|
"input_cost_per_second": 0.009083,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 100000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_second": 0.009083,
|
|
"supports_tool_choice": true
|
|
},
|
|
"bedrock/eu-central-1/6-month-commitment/anthropic.claude-v1": {
|
|
"input_cost_per_second": 0.02305,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 100000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_second": 0.02305
|
|
},
|
|
"bedrock/eu-central-1/6-month-commitment/anthropic.claude-v2:1": {
|
|
"input_cost_per_second": 0.02305,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 100000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_second": 0.02305,
|
|
"supports_tool_choice": true
|
|
},
|
|
"bedrock/eu-central-1/anthropic.claude-instant-v1": {
|
|
"input_cost_per_token": 2.48e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 100000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8.38e-06,
|
|
"supports_tool_choice": true
|
|
},
|
|
"bedrock/eu-central-1/anthropic.claude-v1": {
|
|
"input_cost_per_token": 8e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 100000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.4e-05
|
|
},
|
|
"bedrock/eu-central-1/anthropic.claude-v2:1": {
|
|
"input_cost_per_token": 8e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 100000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.4e-05,
|
|
"supports_tool_choice": true
|
|
},
|
|
"bedrock/eu-west-1/meta.llama3-70b-instruct-v1:0": {
|
|
"input_cost_per_token": 2.86e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3.78e-06
|
|
},
|
|
"bedrock/eu-west-1/meta.llama3-8b-instruct-v1:0": {
|
|
"input_cost_per_token": 3.2e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6.5e-07
|
|
},
|
|
"bedrock/eu-west-2/meta.llama3-70b-instruct-v1:0": {
|
|
"input_cost_per_token": 3.45e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4.55e-06
|
|
},
|
|
"bedrock/eu-west-2/meta.llama3-8b-instruct-v1:0": {
|
|
"input_cost_per_token": 3.9e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.8e-07
|
|
},
|
|
"bedrock/eu-west-3/mistral.mistral-7b-instruct-v0:2": {
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.6e-07,
|
|
"supports_tool_choice": true
|
|
},
|
|
"bedrock/eu-west-3/mistral.mistral-large-2402-v1:0": {
|
|
"input_cost_per_token": 1.04e-05,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3.12e-05,
|
|
"supports_function_calling": true
|
|
},
|
|
"bedrock/eu-west-3/mistral.mixtral-8x7b-instruct-v0:1": {
|
|
"input_cost_per_token": 5.9e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 9.1e-07,
|
|
"supports_tool_choice": true
|
|
},
|
|
"bedrock/invoke/anthropic.claude-3-5-sonnet-20240620-v1:0": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"metadata": {
|
|
"notes": "Anthropic via Invoke route does not currently support pdf input."
|
|
},
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"bedrock/sa-east-1/meta.llama3-70b-instruct-v1:0": {
|
|
"input_cost_per_token": 4.45e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5.88e-06
|
|
},
|
|
"bedrock/sa-east-1/meta.llama3-8b-instruct-v1:0": {
|
|
"input_cost_per_token": 5e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.01e-06
|
|
},
|
|
"bedrock/us-east-1/1-month-commitment/anthropic.claude-instant-v1": {
|
|
"input_cost_per_second": 0.011,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 100000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_second": 0.011,
|
|
"supports_tool_choice": true
|
|
},
|
|
"bedrock/us-east-1/1-month-commitment/anthropic.claude-v1": {
|
|
"input_cost_per_second": 0.0175,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 100000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_second": 0.0175
|
|
},
|
|
"bedrock/us-east-1/1-month-commitment/anthropic.claude-v2:1": {
|
|
"input_cost_per_second": 0.0175,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 100000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_second": 0.0175,
|
|
"supports_tool_choice": true
|
|
},
|
|
"bedrock/us-east-1/6-month-commitment/anthropic.claude-instant-v1": {
|
|
"input_cost_per_second": 0.00611,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 100000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_second": 0.00611,
|
|
"supports_tool_choice": true
|
|
},
|
|
"bedrock/us-east-1/6-month-commitment/anthropic.claude-v1": {
|
|
"input_cost_per_second": 0.00972,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 100000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_second": 0.00972
|
|
},
|
|
"bedrock/us-east-1/6-month-commitment/anthropic.claude-v2:1": {
|
|
"input_cost_per_second": 0.00972,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 100000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_second": 0.00972,
|
|
"supports_tool_choice": true
|
|
},
|
|
"bedrock/us-east-1/anthropic.claude-instant-v1": {
|
|
"input_cost_per_token": 8e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 100000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.4e-06,
|
|
"supports_tool_choice": true
|
|
},
|
|
"bedrock/us-east-1/anthropic.claude-v1": {
|
|
"input_cost_per_token": 8e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 100000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.4e-05,
|
|
"supports_tool_choice": true
|
|
},
|
|
"bedrock/us-east-1/anthropic.claude-v2:1": {
|
|
"input_cost_per_token": 8e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 100000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.4e-05,
|
|
"supports_tool_choice": true
|
|
},
|
|
"bedrock/us-east-1/meta.llama3-70b-instruct-v1:0": {
|
|
"input_cost_per_token": 2.65e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3.5e-06
|
|
},
|
|
"bedrock/us-east-1/meta.llama3-8b-instruct-v1:0": {
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-07
|
|
},
|
|
"bedrock/us-east-1/mistral.mistral-7b-instruct-v0:2": {
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-07,
|
|
"supports_tool_choice": true
|
|
},
|
|
"bedrock/us-east-1/mistral.mistral-large-2402-v1:0": {
|
|
"input_cost_per_token": 8e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.4e-05,
|
|
"supports_function_calling": true
|
|
},
|
|
"bedrock/us-east-1/mistral.mixtral-8x7b-instruct-v0:1": {
|
|
"input_cost_per_token": 4.5e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7e-07,
|
|
"supports_tool_choice": true
|
|
},
|
|
"bedrock/us-gov-east-1/amazon.nova-pro-v1:0": {
|
|
"input_cost_per_token": 9.6e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 300000,
|
|
"max_output_tokens": 10000,
|
|
"max_tokens": 10000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3.84e-06,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_vision": true
|
|
},
|
|
"bedrock/us-gov-east-1/amazon.titan-embed-text-v1": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0,
|
|
"output_vector_size": 1536
|
|
},
|
|
"bedrock/us-gov-east-1/amazon.titan-embed-text-v2:0": {
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0,
|
|
"output_vector_size": 1024
|
|
},
|
|
"bedrock/us-gov-east-1/amazon.titan-text-express-v1": {
|
|
"input_cost_per_token": 1.3e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 42000,
|
|
"max_output_tokens": 8000,
|
|
"max_tokens": 8000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.7e-06
|
|
},
|
|
"bedrock/us-gov-east-1/amazon.titan-text-lite-v1": {
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 42000,
|
|
"max_output_tokens": 4000,
|
|
"max_tokens": 4000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-07
|
|
},
|
|
"bedrock/us-gov-east-1/amazon.titan-text-premier-v1:0": {
|
|
"input_cost_per_token": 5e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 42000,
|
|
"max_output_tokens": 32000,
|
|
"max_tokens": 32000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-06
|
|
},
|
|
"bedrock/us-gov-east-1/anthropic.claude-3-5-sonnet-20240620-v1:0": {
|
|
"input_cost_per_token": 3.6e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.8e-05,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"bedrock/us-gov-east-1/anthropic.claude-3-haiku-20240307-v1:0": {
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-06,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"bedrock/us-gov-east-1/claude-sonnet-4-5-20250929-v1:0": {
|
|
"input_cost_per_token": 3.3e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.65e-05,
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"bedrock/us-gov-east-1/meta.llama3-70b-instruct-v1:0": {
|
|
"input_cost_per_token": 2.65e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 8000,
|
|
"max_output_tokens": 2048,
|
|
"max_tokens": 2048,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3.5e-06,
|
|
"supports_pdf_input": true
|
|
},
|
|
"bedrock/us-gov-east-1/meta.llama3-8b-instruct-v1:0": {
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 8000,
|
|
"max_output_tokens": 2048,
|
|
"max_tokens": 2048,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.65e-06,
|
|
"supports_pdf_input": true
|
|
},
|
|
"bedrock/us-gov-west-1/amazon.nova-pro-v1:0": {
|
|
"input_cost_per_token": 9.6e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 300000,
|
|
"max_output_tokens": 10000,
|
|
"max_tokens": 10000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3.84e-06,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_vision": true
|
|
},
|
|
"bedrock/us-gov-west-1/amazon.titan-embed-text-v1": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0,
|
|
"output_vector_size": 1536
|
|
},
|
|
"bedrock/us-gov-west-1/amazon.titan-embed-text-v2:0": {
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0,
|
|
"output_vector_size": 1024
|
|
},
|
|
"bedrock/us-gov-west-1/amazon.titan-text-express-v1": {
|
|
"input_cost_per_token": 1.3e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 42000,
|
|
"max_output_tokens": 8000,
|
|
"max_tokens": 8000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.7e-06
|
|
},
|
|
"bedrock/us-gov-west-1/amazon.titan-text-lite-v1": {
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 42000,
|
|
"max_output_tokens": 4000,
|
|
"max_tokens": 4000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-07
|
|
},
|
|
"bedrock/us-gov-west-1/amazon.titan-text-premier-v1:0": {
|
|
"input_cost_per_token": 5e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 42000,
|
|
"max_output_tokens": 32000,
|
|
"max_tokens": 32000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-06
|
|
},
|
|
"bedrock/us-gov-west-1/anthropic.claude-3-7-sonnet-20250219-v1:0": {
|
|
"cache_creation_input_token_cost": 4.5e-06,
|
|
"cache_read_input_token_cost": 3.6e-07,
|
|
"input_cost_per_token": 3.6e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.8e-05,
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"bedrock/us-gov-west-1/anthropic.claude-3-5-sonnet-20240620-v1:0": {
|
|
"input_cost_per_token": 3.6e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.8e-05,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"bedrock/us-gov-west-1/anthropic.claude-3-haiku-20240307-v1:0": {
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-06,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"bedrock/us-gov-west-1/claude-sonnet-4-5-20250929-v1:0": {
|
|
"input_cost_per_token": 3.3e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.65e-05,
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"bedrock/us-gov-west-1/meta.llama3-70b-instruct-v1:0": {
|
|
"input_cost_per_token": 2.65e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 8000,
|
|
"max_output_tokens": 2048,
|
|
"max_tokens": 2048,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3.5e-06,
|
|
"supports_pdf_input": true
|
|
},
|
|
"bedrock/us-gov-west-1/meta.llama3-8b-instruct-v1:0": {
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 8000,
|
|
"max_output_tokens": 2048,
|
|
"max_tokens": 2048,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.65e-06,
|
|
"supports_pdf_input": true
|
|
},
|
|
"bedrock/us-west-1/meta.llama3-70b-instruct-v1:0": {
|
|
"input_cost_per_token": 2.65e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3.5e-06
|
|
},
|
|
"bedrock/us-west-1/meta.llama3-8b-instruct-v1:0": {
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-07
|
|
},
|
|
"bedrock/us-west-2/1-month-commitment/anthropic.claude-instant-v1": {
|
|
"input_cost_per_second": 0.011,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 100000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_second": 0.011,
|
|
"supports_tool_choice": true
|
|
},
|
|
"bedrock/us-west-2/1-month-commitment/anthropic.claude-v1": {
|
|
"input_cost_per_second": 0.0175,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 100000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_second": 0.0175
|
|
},
|
|
"bedrock/us-west-2/1-month-commitment/anthropic.claude-v2:1": {
|
|
"input_cost_per_second": 0.0175,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 100000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_second": 0.0175,
|
|
"supports_tool_choice": true
|
|
},
|
|
"bedrock/us-west-2/6-month-commitment/anthropic.claude-instant-v1": {
|
|
"input_cost_per_second": 0.00611,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 100000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_second": 0.00611,
|
|
"supports_tool_choice": true
|
|
},
|
|
"bedrock/us-west-2/6-month-commitment/anthropic.claude-v1": {
|
|
"input_cost_per_second": 0.00972,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 100000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_second": 0.00972
|
|
},
|
|
"bedrock/us-west-2/6-month-commitment/anthropic.claude-v2:1": {
|
|
"input_cost_per_second": 0.00972,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 100000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_second": 0.00972,
|
|
"supports_tool_choice": true
|
|
},
|
|
"bedrock/us-west-2/anthropic.claude-instant-v1": {
|
|
"input_cost_per_token": 8e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 100000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.4e-06,
|
|
"supports_tool_choice": true
|
|
},
|
|
"bedrock/us-west-2/anthropic.claude-v1": {
|
|
"input_cost_per_token": 8e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 100000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.4e-05,
|
|
"supports_tool_choice": true
|
|
},
|
|
"bedrock/us-west-2/anthropic.claude-v2:1": {
|
|
"input_cost_per_token": 8e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 100000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.4e-05,
|
|
"supports_tool_choice": true
|
|
},
|
|
"bedrock/us-west-2/mistral.mistral-7b-instruct-v0:2": {
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-07,
|
|
"supports_tool_choice": true
|
|
},
|
|
"bedrock/us-west-2/mistral.mistral-large-2402-v1:0": {
|
|
"input_cost_per_token": 8e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.4e-05,
|
|
"supports_function_calling": true
|
|
},
|
|
"bedrock/us-west-2/mistral.mixtral-8x7b-instruct-v0:1": {
|
|
"input_cost_per_token": 4.5e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7e-07,
|
|
"supports_tool_choice": true
|
|
},
|
|
"bedrock/us.anthropic.claude-3-5-haiku-20241022-v1:0": {
|
|
"cache_creation_input_token_cost": 1e-06,
|
|
"cache_read_input_token_cost": 8e-08,
|
|
"input_cost_per_token": 8e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-06,
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"cerebras/llama-3.3-70b": {
|
|
"input_cost_per_token": 8.5e-07,
|
|
"litellm_provider": "cerebras",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.2e-06,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"cerebras/llama3.1-70b": {
|
|
"input_cost_per_token": 6e-07,
|
|
"litellm_provider": "cerebras",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-07,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"cerebras/llama3.1-8b": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "cerebras",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-07,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"cerebras/gpt-oss-120b": {
|
|
"input_cost_per_token": 2.5e-07,
|
|
"litellm_provider": "cerebras",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6.9e-07,
|
|
"source": "https://www.cerebras.ai/blog/openai-gpt-oss-120b-runs-fastest-on-cerebras",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"cerebras/qwen-3-32b": {
|
|
"input_cost_per_token": 4e-07,
|
|
"litellm_provider": "cerebras",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8e-07,
|
|
"source": "https://inference-docs.cerebras.ai/support/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"chat-bison": {
|
|
"input_cost_per_character": 2.5e-07,
|
|
"input_cost_per_token": 1.25e-07,
|
|
"litellm_provider": "vertex_ai-chat-models",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_character": 5e-07,
|
|
"output_cost_per_token": 1.25e-07,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models",
|
|
"supports_tool_choice": true
|
|
},
|
|
"chat-bison-32k": {
|
|
"input_cost_per_character": 2.5e-07,
|
|
"input_cost_per_token": 1.25e-07,
|
|
"litellm_provider": "vertex_ai-chat-models",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_character": 5e-07,
|
|
"output_cost_per_token": 1.25e-07,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models",
|
|
"supports_tool_choice": true
|
|
},
|
|
"chat-bison-32k@002": {
|
|
"input_cost_per_character": 2.5e-07,
|
|
"input_cost_per_token": 1.25e-07,
|
|
"litellm_provider": "vertex_ai-chat-models",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_character": 5e-07,
|
|
"output_cost_per_token": 1.25e-07,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models",
|
|
"supports_tool_choice": true
|
|
},
|
|
"chat-bison@001": {
|
|
"input_cost_per_character": 2.5e-07,
|
|
"input_cost_per_token": 1.25e-07,
|
|
"litellm_provider": "vertex_ai-chat-models",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_character": 5e-07,
|
|
"output_cost_per_token": 1.25e-07,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models",
|
|
"supports_tool_choice": true
|
|
},
|
|
"chat-bison@002": {
|
|
"deprecation_date": "2025-04-09",
|
|
"input_cost_per_character": 2.5e-07,
|
|
"input_cost_per_token": 1.25e-07,
|
|
"litellm_provider": "vertex_ai-chat-models",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_character": 5e-07,
|
|
"output_cost_per_token": 1.25e-07,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models",
|
|
"supports_tool_choice": true
|
|
},
|
|
"chatdolphin": {
|
|
"input_cost_per_token": 5e-07,
|
|
"litellm_provider": "nlp_cloud",
|
|
"max_input_tokens": 16384,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5e-07
|
|
},
|
|
"chatgpt-4o-latest": {
|
|
"input_cost_per_token": 5e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"claude-3-5-haiku-20241022": {
|
|
"cache_creation_input_token_cost": 1e-06,
|
|
"cache_creation_input_token_cost_above_1hr": 6e-06,
|
|
"cache_read_input_token_cost": 8e-08,
|
|
"deprecation_date": "2025-10-01",
|
|
"input_cost_per_token": 8e-07,
|
|
"litellm_provider": "anthropic",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-06,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.01,
|
|
"search_context_size_low": 0.01,
|
|
"search_context_size_medium": 0.01
|
|
},
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true,
|
|
"tool_use_system_prompt_tokens": 264
|
|
},
|
|
"claude-3-5-haiku-latest": {
|
|
"cache_creation_input_token_cost": 1.25e-06,
|
|
"cache_creation_input_token_cost_above_1hr": 6e-06,
|
|
"cache_read_input_token_cost": 1e-07,
|
|
"deprecation_date": "2025-10-01",
|
|
"input_cost_per_token": 1e-06,
|
|
"litellm_provider": "anthropic",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5e-06,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.01,
|
|
"search_context_size_low": 0.01,
|
|
"search_context_size_medium": 0.01
|
|
},
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true,
|
|
"tool_use_system_prompt_tokens": 264
|
|
},
|
|
"claude-haiku-4-5-20251001": {
|
|
"cache_creation_input_token_cost": 1.25e-06,
|
|
"cache_creation_input_token_cost_above_1hr": 2e-06,
|
|
"cache_read_input_token_cost": 1e-07,
|
|
"input_cost_per_token": 1e-06,
|
|
"litellm_provider": "anthropic",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 64000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5e-06,
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_computer_use": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"claude-haiku-4-5": {
|
|
"cache_creation_input_token_cost": 1.25e-06,
|
|
"cache_creation_input_token_cost_above_1hr": 2e-06,
|
|
"cache_read_input_token_cost": 1e-07,
|
|
"input_cost_per_token": 1e-06,
|
|
"litellm_provider": "anthropic",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 64000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5e-06,
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_computer_use": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"claude-3-5-sonnet-20240620": {
|
|
"cache_creation_input_token_cost": 3.75e-06,
|
|
"cache_creation_input_token_cost_above_1hr": 6e-06,
|
|
"cache_read_input_token_cost": 3e-07,
|
|
"deprecation_date": "2025-06-01",
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "anthropic",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 159
|
|
},
|
|
"claude-3-5-sonnet-20241022": {
|
|
"cache_creation_input_token_cost": 3.75e-06,
|
|
"cache_creation_input_token_cost_above_1hr": 6e-06,
|
|
"cache_read_input_token_cost": 3e-07,
|
|
"deprecation_date": "2025-10-01",
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "anthropic",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.01,
|
|
"search_context_size_low": 0.01,
|
|
"search_context_size_medium": 0.01
|
|
},
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true,
|
|
"tool_use_system_prompt_tokens": 159
|
|
},
|
|
"claude-3-5-sonnet-latest": {
|
|
"cache_creation_input_token_cost": 3.75e-06,
|
|
"cache_creation_input_token_cost_above_1hr": 6e-06,
|
|
"cache_read_input_token_cost": 3e-07,
|
|
"deprecation_date": "2025-06-01",
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "anthropic",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.01,
|
|
"search_context_size_low": 0.01,
|
|
"search_context_size_medium": 0.01
|
|
},
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true,
|
|
"tool_use_system_prompt_tokens": 159
|
|
},
|
|
"claude-3-7-sonnet-20250219": {
|
|
"cache_creation_input_token_cost": 3.75e-06,
|
|
"cache_creation_input_token_cost_above_1hr": 6e-06,
|
|
"cache_read_input_token_cost": 3e-07,
|
|
"deprecation_date": "2026-02-19",
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "anthropic",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.01,
|
|
"search_context_size_low": 0.01,
|
|
"search_context_size_medium": 0.01
|
|
},
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true,
|
|
"tool_use_system_prompt_tokens": 159
|
|
},
|
|
"claude-3-7-sonnet-latest": {
|
|
"cache_creation_input_token_cost": 3.75e-06,
|
|
"cache_creation_input_token_cost_above_1hr": 6e-06,
|
|
"cache_read_input_token_cost": 3e-07,
|
|
"deprecation_date": "2025-06-01",
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "anthropic",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.01,
|
|
"search_context_size_low": 0.01,
|
|
"search_context_size_medium": 0.01
|
|
},
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 159
|
|
},
|
|
"claude-3-haiku-20240307": {
|
|
"cache_creation_input_token_cost": 3e-07,
|
|
"cache_creation_input_token_cost_above_1hr": 6e-06,
|
|
"cache_read_input_token_cost": 3e-08,
|
|
"input_cost_per_token": 2.5e-07,
|
|
"litellm_provider": "anthropic",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.25e-06,
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 264
|
|
},
|
|
"claude-3-opus-20240229": {
|
|
"cache_creation_input_token_cost": 1.875e-05,
|
|
"cache_creation_input_token_cost_above_1hr": 6e-06,
|
|
"cache_read_input_token_cost": 1.5e-06,
|
|
"deprecation_date": "2026-05-01",
|
|
"input_cost_per_token": 1.5e-05,
|
|
"litellm_provider": "anthropic",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.5e-05,
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 395
|
|
},
|
|
"claude-3-opus-latest": {
|
|
"cache_creation_input_token_cost": 1.875e-05,
|
|
"cache_creation_input_token_cost_above_1hr": 6e-06,
|
|
"cache_read_input_token_cost": 1.5e-06,
|
|
"deprecation_date": "2025-03-01",
|
|
"input_cost_per_token": 1.5e-05,
|
|
"litellm_provider": "anthropic",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.5e-05,
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 395
|
|
},
|
|
"claude-4-opus-20250514": {
|
|
"cache_creation_input_token_cost": 1.875e-05,
|
|
"cache_read_input_token_cost": 1.5e-06,
|
|
"input_cost_per_token": 1.5e-05,
|
|
"litellm_provider": "anthropic",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 32000,
|
|
"max_tokens": 32000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.5e-05,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.01,
|
|
"search_context_size_low": 0.01,
|
|
"search_context_size_medium": 0.01
|
|
},
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 159
|
|
},
|
|
"claude-4-sonnet-20250514": {
|
|
"cache_creation_input_token_cost": 3.75e-06,
|
|
"cache_creation_input_token_cost_above_200k_tokens": 7.5e-06,
|
|
"cache_read_input_token_cost": 3e-07,
|
|
"cache_read_input_token_cost_above_200k_tokens": 6e-07,
|
|
"input_cost_per_token": 3e-06,
|
|
"input_cost_per_token_above_200k_tokens": 6e-06,
|
|
"litellm_provider": "anthropic",
|
|
"max_input_tokens": 1000000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 1000000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"output_cost_per_token_above_200k_tokens": 2.25e-05,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.01,
|
|
"search_context_size_low": 0.01,
|
|
"search_context_size_medium": 0.01
|
|
},
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 159
|
|
},
|
|
"claude-sonnet-4-5": {
|
|
"cache_creation_input_token_cost": 3.75e-06,
|
|
"cache_read_input_token_cost": 3e-07,
|
|
"input_cost_per_token": 3e-06,
|
|
"input_cost_per_token_above_200k_tokens": 6e-06,
|
|
"output_cost_per_token_above_200k_tokens": 2.25e-05,
|
|
"cache_creation_input_token_cost_above_200k_tokens": 7.5e-06,
|
|
"cache_read_input_token_cost_above_200k_tokens": 6e-07,
|
|
"litellm_provider": "anthropic",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 64000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.01,
|
|
"search_context_size_low": 0.01,
|
|
"search_context_size_medium": 0.01
|
|
},
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 346
|
|
},
|
|
"claude-sonnet-4-5-20250929": {
|
|
"cache_creation_input_token_cost": 3.75e-06,
|
|
"cache_read_input_token_cost": 3e-07,
|
|
"input_cost_per_token": 3e-06,
|
|
"input_cost_per_token_above_200k_tokens": 6e-06,
|
|
"output_cost_per_token_above_200k_tokens": 2.25e-05,
|
|
"cache_creation_input_token_cost_above_200k_tokens": 7.5e-06,
|
|
"cache_read_input_token_cost_above_200k_tokens": 6e-07,
|
|
"litellm_provider": "anthropic",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 64000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.01,
|
|
"search_context_size_low": 0.01,
|
|
"search_context_size_medium": 0.01
|
|
},
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true,
|
|
"tool_use_system_prompt_tokens": 346
|
|
},
|
|
"claude-sonnet-4-5-20250929-v1:0": {
|
|
"cache_creation_input_token_cost": 3.75e-06,
|
|
"cache_read_input_token_cost": 3e-07,
|
|
"input_cost_per_token": 3e-06,
|
|
"input_cost_per_token_above_200k_tokens": 6e-06,
|
|
"output_cost_per_token_above_200k_tokens": 2.25e-05,
|
|
"cache_creation_input_token_cost_above_200k_tokens": 7.5e-06,
|
|
"cache_read_input_token_cost_above_200k_tokens": 6e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 64000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 159
|
|
},
|
|
"claude-opus-4-1": {
|
|
"cache_creation_input_token_cost": 1.875e-05,
|
|
"cache_creation_input_token_cost_above_1hr": 3e-05,
|
|
"cache_read_input_token_cost": 1.5e-06,
|
|
"input_cost_per_token": 1.5e-05,
|
|
"litellm_provider": "anthropic",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 32000,
|
|
"max_tokens": 32000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.5e-05,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.01,
|
|
"search_context_size_low": 0.01,
|
|
"search_context_size_medium": 0.01
|
|
},
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 159
|
|
},
|
|
"claude-opus-4-1-20250805": {
|
|
"cache_creation_input_token_cost": 1.875e-05,
|
|
"cache_creation_input_token_cost_above_1hr": 3e-05,
|
|
"cache_read_input_token_cost": 1.5e-06,
|
|
"input_cost_per_token": 1.5e-05,
|
|
"deprecation_date": "2026-08-05",
|
|
"litellm_provider": "anthropic",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 32000,
|
|
"max_tokens": 32000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.5e-05,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.01,
|
|
"search_context_size_low": 0.01,
|
|
"search_context_size_medium": 0.01
|
|
},
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 159
|
|
},
|
|
"claude-opus-4-20250514": {
|
|
"cache_creation_input_token_cost": 1.875e-05,
|
|
"cache_creation_input_token_cost_above_1hr": 3e-05,
|
|
"cache_read_input_token_cost": 1.5e-06,
|
|
"input_cost_per_token": 1.5e-05,
|
|
"deprecation_date": "2026-05-14",
|
|
"litellm_provider": "anthropic",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 32000,
|
|
"max_tokens": 32000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.5e-05,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.01,
|
|
"search_context_size_low": 0.01,
|
|
"search_context_size_medium": 0.01
|
|
},
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 159
|
|
},
|
|
"claude-opus-4-5-20251101": {
|
|
"cache_creation_input_token_cost": 6.25e-06,
|
|
"cache_creation_input_token_cost_above_1hr": 1e-05,
|
|
"cache_read_input_token_cost": 5e-07,
|
|
"input_cost_per_token": 5e-06,
|
|
"litellm_provider": "anthropic",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 64000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.5e-05,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.01,
|
|
"search_context_size_low": 0.01,
|
|
"search_context_size_medium": 0.01
|
|
},
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 159
|
|
},
|
|
"claude-opus-4-5": {
|
|
"cache_creation_input_token_cost": 6.25e-06,
|
|
"cache_creation_input_token_cost_above_1hr": 1e-05,
|
|
"cache_read_input_token_cost": 5e-07,
|
|
"input_cost_per_token": 5e-06,
|
|
"litellm_provider": "anthropic",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 64000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.5e-05,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.01,
|
|
"search_context_size_low": 0.01,
|
|
"search_context_size_medium": 0.01
|
|
},
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 159
|
|
},
|
|
"claude-sonnet-4-20250514": {
|
|
"deprecation_date": "2026-05-14",
|
|
"cache_creation_input_token_cost": 3.75e-06,
|
|
"cache_creation_input_token_cost_above_1hr": 6e-06,
|
|
"cache_read_input_token_cost": 3e-07,
|
|
"input_cost_per_token": 3e-06,
|
|
"input_cost_per_token_above_200k_tokens": 6e-06,
|
|
"output_cost_per_token_above_200k_tokens": 2.25e-05,
|
|
"cache_creation_input_token_cost_above_200k_tokens": 7.5e-06,
|
|
"cache_read_input_token_cost_above_200k_tokens": 6e-07,
|
|
"litellm_provider": "anthropic",
|
|
"max_input_tokens": 1000000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 64000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.01,
|
|
"search_context_size_low": 0.01,
|
|
"search_context_size_medium": 0.01
|
|
},
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 159
|
|
},
|
|
"cloudflare/@cf/meta/llama-2-7b-chat-fp16": {
|
|
"input_cost_per_token": 1.923e-06,
|
|
"litellm_provider": "cloudflare",
|
|
"max_input_tokens": 3072,
|
|
"max_output_tokens": 3072,
|
|
"max_tokens": 3072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.923e-06
|
|
},
|
|
"cloudflare/@cf/meta/llama-2-7b-chat-int8": {
|
|
"input_cost_per_token": 1.923e-06,
|
|
"litellm_provider": "cloudflare",
|
|
"max_input_tokens": 2048,
|
|
"max_output_tokens": 2048,
|
|
"max_tokens": 2048,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.923e-06
|
|
},
|
|
"cloudflare/@cf/mistral/mistral-7b-instruct-v0.1": {
|
|
"input_cost_per_token": 1.923e-06,
|
|
"litellm_provider": "cloudflare",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.923e-06
|
|
},
|
|
"cloudflare/@hf/thebloke/codellama-7b-instruct-awq": {
|
|
"input_cost_per_token": 1.923e-06,
|
|
"litellm_provider": "cloudflare",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.923e-06
|
|
},
|
|
"code-bison": {
|
|
"input_cost_per_character": 2.5e-07,
|
|
"input_cost_per_token": 1.25e-07,
|
|
"litellm_provider": "vertex_ai-code-text-models",
|
|
"max_input_tokens": 6144,
|
|
"max_output_tokens": 1024,
|
|
"max_tokens": 1024,
|
|
"mode": "chat",
|
|
"output_cost_per_character": 5e-07,
|
|
"output_cost_per_token": 1.25e-07,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models",
|
|
"supports_tool_choice": true
|
|
},
|
|
"code-bison-32k@002": {
|
|
"input_cost_per_character": 2.5e-07,
|
|
"input_cost_per_token": 1.25e-07,
|
|
"litellm_provider": "vertex_ai-code-text-models",
|
|
"max_input_tokens": 6144,
|
|
"max_output_tokens": 1024,
|
|
"max_tokens": 1024,
|
|
"mode": "completion",
|
|
"output_cost_per_character": 5e-07,
|
|
"output_cost_per_token": 1.25e-07,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models"
|
|
},
|
|
"code-bison32k": {
|
|
"input_cost_per_character": 2.5e-07,
|
|
"input_cost_per_token": 1.25e-07,
|
|
"litellm_provider": "vertex_ai-code-text-models",
|
|
"max_input_tokens": 6144,
|
|
"max_output_tokens": 1024,
|
|
"max_tokens": 1024,
|
|
"mode": "completion",
|
|
"output_cost_per_character": 5e-07,
|
|
"output_cost_per_token": 1.25e-07,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models"
|
|
},
|
|
"code-bison@001": {
|
|
"input_cost_per_character": 2.5e-07,
|
|
"input_cost_per_token": 1.25e-07,
|
|
"litellm_provider": "vertex_ai-code-text-models",
|
|
"max_input_tokens": 6144,
|
|
"max_output_tokens": 1024,
|
|
"max_tokens": 1024,
|
|
"mode": "completion",
|
|
"output_cost_per_character": 5e-07,
|
|
"output_cost_per_token": 1.25e-07,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models"
|
|
},
|
|
"code-bison@002": {
|
|
"input_cost_per_character": 2.5e-07,
|
|
"input_cost_per_token": 1.25e-07,
|
|
"litellm_provider": "vertex_ai-code-text-models",
|
|
"max_input_tokens": 6144,
|
|
"max_output_tokens": 1024,
|
|
"max_tokens": 1024,
|
|
"mode": "completion",
|
|
"output_cost_per_character": 5e-07,
|
|
"output_cost_per_token": 1.25e-07,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models"
|
|
},
|
|
"code-gecko": {
|
|
"input_cost_per_token": 1.25e-07,
|
|
"litellm_provider": "vertex_ai-code-text-models",
|
|
"max_input_tokens": 2048,
|
|
"max_output_tokens": 64,
|
|
"max_tokens": 64,
|
|
"mode": "completion",
|
|
"output_cost_per_token": 1.25e-07,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models"
|
|
},
|
|
"code-gecko-latest": {
|
|
"input_cost_per_token": 1.25e-07,
|
|
"litellm_provider": "vertex_ai-code-text-models",
|
|
"max_input_tokens": 2048,
|
|
"max_output_tokens": 64,
|
|
"max_tokens": 64,
|
|
"mode": "completion",
|
|
"output_cost_per_token": 1.25e-07,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models"
|
|
},
|
|
"code-gecko@001": {
|
|
"input_cost_per_token": 1.25e-07,
|
|
"litellm_provider": "vertex_ai-code-text-models",
|
|
"max_input_tokens": 2048,
|
|
"max_output_tokens": 64,
|
|
"max_tokens": 64,
|
|
"mode": "completion",
|
|
"output_cost_per_token": 1.25e-07,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models"
|
|
},
|
|
"code-gecko@002": {
|
|
"input_cost_per_token": 1.25e-07,
|
|
"litellm_provider": "vertex_ai-code-text-models",
|
|
"max_input_tokens": 2048,
|
|
"max_output_tokens": 64,
|
|
"max_tokens": 64,
|
|
"mode": "completion",
|
|
"output_cost_per_token": 1.25e-07,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models"
|
|
},
|
|
"codechat-bison": {
|
|
"input_cost_per_character": 2.5e-07,
|
|
"input_cost_per_token": 1.25e-07,
|
|
"litellm_provider": "vertex_ai-code-chat-models",
|
|
"max_input_tokens": 6144,
|
|
"max_output_tokens": 1024,
|
|
"max_tokens": 1024,
|
|
"mode": "chat",
|
|
"output_cost_per_character": 5e-07,
|
|
"output_cost_per_token": 1.25e-07,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models",
|
|
"supports_tool_choice": true
|
|
},
|
|
"codechat-bison-32k": {
|
|
"input_cost_per_character": 2.5e-07,
|
|
"input_cost_per_token": 1.25e-07,
|
|
"litellm_provider": "vertex_ai-code-chat-models",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_character": 5e-07,
|
|
"output_cost_per_token": 1.25e-07,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models",
|
|
"supports_tool_choice": true
|
|
},
|
|
"codechat-bison-32k@002": {
|
|
"input_cost_per_character": 2.5e-07,
|
|
"input_cost_per_token": 1.25e-07,
|
|
"litellm_provider": "vertex_ai-code-chat-models",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_character": 5e-07,
|
|
"output_cost_per_token": 1.25e-07,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models",
|
|
"supports_tool_choice": true
|
|
},
|
|
"codechat-bison@001": {
|
|
"input_cost_per_character": 2.5e-07,
|
|
"input_cost_per_token": 1.25e-07,
|
|
"litellm_provider": "vertex_ai-code-chat-models",
|
|
"max_input_tokens": 6144,
|
|
"max_output_tokens": 1024,
|
|
"max_tokens": 1024,
|
|
"mode": "chat",
|
|
"output_cost_per_character": 5e-07,
|
|
"output_cost_per_token": 1.25e-07,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models",
|
|
"supports_tool_choice": true
|
|
},
|
|
"codechat-bison@002": {
|
|
"input_cost_per_character": 2.5e-07,
|
|
"input_cost_per_token": 1.25e-07,
|
|
"litellm_provider": "vertex_ai-code-chat-models",
|
|
"max_input_tokens": 6144,
|
|
"max_output_tokens": 1024,
|
|
"max_tokens": 1024,
|
|
"mode": "chat",
|
|
"output_cost_per_character": 5e-07,
|
|
"output_cost_per_token": 1.25e-07,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models",
|
|
"supports_tool_choice": true
|
|
},
|
|
"codechat-bison@latest": {
|
|
"input_cost_per_character": 2.5e-07,
|
|
"input_cost_per_token": 1.25e-07,
|
|
"litellm_provider": "vertex_ai-code-chat-models",
|
|
"max_input_tokens": 6144,
|
|
"max_output_tokens": 1024,
|
|
"max_tokens": 1024,
|
|
"mode": "chat",
|
|
"output_cost_per_character": 5e-07,
|
|
"output_cost_per_token": 1.25e-07,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models",
|
|
"supports_tool_choice": true
|
|
},
|
|
"codestral/codestral-2405": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "codestral",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0,
|
|
"source": "https://docs.mistral.ai/capabilities/code_generation/",
|
|
"supports_assistant_prefill": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"codestral/codestral-latest": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "codestral",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0,
|
|
"source": "https://docs.mistral.ai/capabilities/code_generation/",
|
|
"supports_assistant_prefill": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"codex-mini-latest": {
|
|
"cache_read_input_token_cost": 3.75e-07,
|
|
"input_cost_per_token": 1.5e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 100000,
|
|
"max_tokens": 100000,
|
|
"mode": "responses",
|
|
"output_cost_per_token": 6e-06,
|
|
"supported_endpoints": [
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"cohere.command-light-text-v14": {
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-07,
|
|
"supports_tool_choice": true
|
|
},
|
|
"cohere.command-r-plus-v1:0": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"supports_tool_choice": true
|
|
},
|
|
"cohere.command-r-v1:0": {
|
|
"input_cost_per_token": 5e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-06,
|
|
"supports_tool_choice": true
|
|
},
|
|
"cohere.command-text-v14": {
|
|
"input_cost_per_token": 1.5e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-06,
|
|
"supports_tool_choice": true
|
|
},
|
|
"cohere.embed-english-v3": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 512,
|
|
"max_tokens": 512,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0,
|
|
"supports_embedding_image_input": true
|
|
},
|
|
"cohere.embed-multilingual-v3": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 512,
|
|
"max_tokens": 512,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0,
|
|
"supports_embedding_image_input": true
|
|
},
|
|
"cohere.embed-v4:0": {
|
|
"input_cost_per_token": 1.2e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0,
|
|
"output_vector_size": 1536,
|
|
"supports_embedding_image_input": true
|
|
},
|
|
"cohere/embed-v4.0": {
|
|
"input_cost_per_token": 1.2e-07,
|
|
"litellm_provider": "cohere",
|
|
"max_input_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0,
|
|
"output_vector_size": 1536,
|
|
"supports_embedding_image_input": true
|
|
},
|
|
"cohere.rerank-v3-5:0": {
|
|
"input_cost_per_query": 0.002,
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "bedrock",
|
|
"max_document_chunks_per_query": 100,
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 32000,
|
|
"max_query_tokens": 32000,
|
|
"max_tokens": 32000,
|
|
"max_tokens_per_document_chunk": 512,
|
|
"mode": "rerank",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"command": {
|
|
"input_cost_per_token": 1e-06,
|
|
"litellm_provider": "cohere",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "completion",
|
|
"output_cost_per_token": 2e-06
|
|
},
|
|
"command-a-03-2025": {
|
|
"input_cost_per_token": 2.5e-06,
|
|
"litellm_provider": "cohere_chat",
|
|
"max_input_tokens": 256000,
|
|
"max_output_tokens": 8000,
|
|
"max_tokens": 8000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"command-light": {
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "cohere_chat",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-07,
|
|
"supports_tool_choice": true
|
|
},
|
|
"command-nightly": {
|
|
"input_cost_per_token": 1e-06,
|
|
"litellm_provider": "cohere",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "completion",
|
|
"output_cost_per_token": 2e-06
|
|
},
|
|
"command-r": {
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "cohere_chat",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-07,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"command-r-08-2024": {
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "cohere_chat",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-07,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"command-r-plus": {
|
|
"input_cost_per_token": 2.5e-06,
|
|
"litellm_provider": "cohere_chat",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"command-r-plus-08-2024": {
|
|
"input_cost_per_token": 2.5e-06,
|
|
"litellm_provider": "cohere_chat",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"command-r7b-12-2024": {
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "cohere_chat",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3.75e-08,
|
|
"source": "https://docs.cohere.com/v2/docs/command-r7b",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"computer-use-preview": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "azure",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 1024,
|
|
"max_tokens": 1024,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.2e-05,
|
|
"supported_endpoints": [
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": false,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"deepseek-chat": {
|
|
"cache_read_input_token_cost": 6e-08,
|
|
"input_cost_per_token": 6e-07,
|
|
"litellm_provider": "deepseek",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.7e-06,
|
|
"source": "https://api-docs.deepseek.com/quick_start/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepseek-reasoner": {
|
|
"cache_read_input_token_cost": 6e-08,
|
|
"input_cost_per_token": 6e-07,
|
|
"litellm_provider": "deepseek",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 65536,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.7e-06,
|
|
"source": "https://api-docs.deepseek.com/quick_start/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions"
|
|
],
|
|
"supports_function_calling": false,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": false,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": false
|
|
},
|
|
"dashscope/qwen-coder": {
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "dashscope",
|
|
"max_input_tokens": 1000000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 1000000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-06,
|
|
"source": "https://www.alibabacloud.com/help/en/model-studio/models",
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"dashscope/qwen-flash": {
|
|
"litellm_provider": "dashscope",
|
|
"max_input_tokens": 997952,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 1000000,
|
|
"mode": "chat",
|
|
"source": "https://www.alibabacloud.com/help/en/model-studio/models",
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true,
|
|
"tiered_pricing": [
|
|
{
|
|
"input_cost_per_token": 5e-08,
|
|
"output_cost_per_token": 4e-07,
|
|
"range": [
|
|
0,
|
|
256000.0
|
|
]
|
|
},
|
|
{
|
|
"input_cost_per_token": 2.5e-07,
|
|
"output_cost_per_token": 2e-06,
|
|
"range": [
|
|
256000.0,
|
|
1000000.0
|
|
]
|
|
}
|
|
]
|
|
},
|
|
"dashscope/qwen-flash-2025-07-28": {
|
|
"litellm_provider": "dashscope",
|
|
"max_input_tokens": 997952,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 1000000,
|
|
"mode": "chat",
|
|
"source": "https://www.alibabacloud.com/help/en/model-studio/models",
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true,
|
|
"tiered_pricing": [
|
|
{
|
|
"input_cost_per_token": 5e-08,
|
|
"output_cost_per_token": 4e-07,
|
|
"range": [
|
|
0,
|
|
256000.0
|
|
]
|
|
},
|
|
{
|
|
"input_cost_per_token": 2.5e-07,
|
|
"output_cost_per_token": 2e-06,
|
|
"range": [
|
|
256000.0,
|
|
1000000.0
|
|
]
|
|
}
|
|
]
|
|
},
|
|
"dashscope/qwen-max": {
|
|
"input_cost_per_token": 1.6e-06,
|
|
"litellm_provider": "dashscope",
|
|
"max_input_tokens": 30720,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6.4e-06,
|
|
"source": "https://www.alibabacloud.com/help/en/model-studio/models",
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"dashscope/qwen-plus": {
|
|
"input_cost_per_token": 4e-07,
|
|
"litellm_provider": "dashscope",
|
|
"max_input_tokens": 129024,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.2e-06,
|
|
"source": "https://www.alibabacloud.com/help/en/model-studio/models",
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"dashscope/qwen-plus-2025-01-25": {
|
|
"input_cost_per_token": 4e-07,
|
|
"litellm_provider": "dashscope",
|
|
"max_input_tokens": 129024,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.2e-06,
|
|
"source": "https://www.alibabacloud.com/help/en/model-studio/models",
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"dashscope/qwen-plus-2025-04-28": {
|
|
"input_cost_per_token": 4e-07,
|
|
"litellm_provider": "dashscope",
|
|
"max_input_tokens": 129024,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_reasoning_token": 4e-06,
|
|
"output_cost_per_token": 1.2e-06,
|
|
"source": "https://www.alibabacloud.com/help/en/model-studio/models",
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"dashscope/qwen-plus-2025-07-14": {
|
|
"input_cost_per_token": 4e-07,
|
|
"litellm_provider": "dashscope",
|
|
"max_input_tokens": 129024,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_reasoning_token": 4e-06,
|
|
"output_cost_per_token": 1.2e-06,
|
|
"source": "https://www.alibabacloud.com/help/en/model-studio/models",
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"dashscope/qwen-plus-2025-07-28": {
|
|
"litellm_provider": "dashscope",
|
|
"max_input_tokens": 997952,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 1000000,
|
|
"mode": "chat",
|
|
"source": "https://www.alibabacloud.com/help/en/model-studio/models",
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true,
|
|
"tiered_pricing": [
|
|
{
|
|
"input_cost_per_token": 4e-07,
|
|
"output_cost_per_reasoning_token": 4e-06,
|
|
"output_cost_per_token": 1.2e-06,
|
|
"range": [
|
|
0,
|
|
256000.0
|
|
]
|
|
},
|
|
{
|
|
"input_cost_per_token": 1.2e-06,
|
|
"output_cost_per_reasoning_token": 1.2e-05,
|
|
"output_cost_per_token": 3.6e-06,
|
|
"range": [
|
|
256000.0,
|
|
1000000.0
|
|
]
|
|
}
|
|
]
|
|
},
|
|
"dashscope/qwen-plus-2025-09-11": {
|
|
"litellm_provider": "dashscope",
|
|
"max_input_tokens": 997952,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 1000000,
|
|
"mode": "chat",
|
|
"source": "https://www.alibabacloud.com/help/en/model-studio/models",
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true,
|
|
"tiered_pricing": [
|
|
{
|
|
"input_cost_per_token": 4e-07,
|
|
"output_cost_per_reasoning_token": 4e-06,
|
|
"output_cost_per_token": 1.2e-06,
|
|
"range": [
|
|
0,
|
|
256000.0
|
|
]
|
|
},
|
|
{
|
|
"input_cost_per_token": 1.2e-06,
|
|
"output_cost_per_reasoning_token": 1.2e-05,
|
|
"output_cost_per_token": 3.6e-06,
|
|
"range": [
|
|
256000.0,
|
|
1000000.0
|
|
]
|
|
}
|
|
]
|
|
},
|
|
"dashscope/qwen-plus-latest": {
|
|
"litellm_provider": "dashscope",
|
|
"max_input_tokens": 997952,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 1000000,
|
|
"mode": "chat",
|
|
"source": "https://www.alibabacloud.com/help/en/model-studio/models",
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true,
|
|
"tiered_pricing": [
|
|
{
|
|
"input_cost_per_token": 4e-07,
|
|
"output_cost_per_reasoning_token": 4e-06,
|
|
"output_cost_per_token": 1.2e-06,
|
|
"range": [
|
|
0,
|
|
256000.0
|
|
]
|
|
},
|
|
{
|
|
"input_cost_per_token": 1.2e-06,
|
|
"output_cost_per_reasoning_token": 1.2e-05,
|
|
"output_cost_per_token": 3.6e-06,
|
|
"range": [
|
|
256000.0,
|
|
1000000.0
|
|
]
|
|
}
|
|
]
|
|
},
|
|
"dashscope/qwen-turbo": {
|
|
"input_cost_per_token": 5e-08,
|
|
"litellm_provider": "dashscope",
|
|
"max_input_tokens": 129024,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_reasoning_token": 5e-07,
|
|
"output_cost_per_token": 2e-07,
|
|
"source": "https://www.alibabacloud.com/help/en/model-studio/models",
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"dashscope/qwen-turbo-2024-11-01": {
|
|
"input_cost_per_token": 5e-08,
|
|
"litellm_provider": "dashscope",
|
|
"max_input_tokens": 1000000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 1000000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-07,
|
|
"source": "https://www.alibabacloud.com/help/en/model-studio/models",
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"dashscope/qwen-turbo-2025-04-28": {
|
|
"input_cost_per_token": 5e-08,
|
|
"litellm_provider": "dashscope",
|
|
"max_input_tokens": 1000000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 1000000,
|
|
"mode": "chat",
|
|
"output_cost_per_reasoning_token": 5e-07,
|
|
"output_cost_per_token": 2e-07,
|
|
"source": "https://www.alibabacloud.com/help/en/model-studio/models",
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"dashscope/qwen-turbo-latest": {
|
|
"input_cost_per_token": 5e-08,
|
|
"litellm_provider": "dashscope",
|
|
"max_input_tokens": 1000000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 1000000,
|
|
"mode": "chat",
|
|
"output_cost_per_reasoning_token": 5e-07,
|
|
"output_cost_per_token": 2e-07,
|
|
"source": "https://www.alibabacloud.com/help/en/model-studio/models",
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"dashscope/qwen3-30b-a3b": {
|
|
"litellm_provider": "dashscope",
|
|
"max_input_tokens": 129024,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"source": "https://www.alibabacloud.com/help/en/model-studio/models",
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"dashscope/qwen3-coder-flash": {
|
|
"litellm_provider": "dashscope",
|
|
"max_input_tokens": 997952,
|
|
"max_output_tokens": 65536,
|
|
"max_tokens": 1000000,
|
|
"mode": "chat",
|
|
"source": "https://www.alibabacloud.com/help/en/model-studio/models",
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true,
|
|
"tiered_pricing": [
|
|
{
|
|
"cache_read_input_token_cost": 8e-08,
|
|
"input_cost_per_token": 3e-07,
|
|
"output_cost_per_token": 1.5e-06,
|
|
"range": [
|
|
0,
|
|
32000.0
|
|
]
|
|
},
|
|
{
|
|
"cache_read_input_token_cost": 1.2e-07,
|
|
"input_cost_per_token": 5e-07,
|
|
"output_cost_per_token": 2.5e-06,
|
|
"range": [
|
|
32000.0,
|
|
128000.0
|
|
]
|
|
},
|
|
{
|
|
"cache_read_input_token_cost": 2e-07,
|
|
"input_cost_per_token": 8e-07,
|
|
"output_cost_per_token": 4e-06,
|
|
"range": [
|
|
128000.0,
|
|
256000.0
|
|
]
|
|
},
|
|
{
|
|
"cache_read_input_token_cost": 4e-07,
|
|
"input_cost_per_token": 1.6e-06,
|
|
"output_cost_per_token": 9.6e-06,
|
|
"range": [
|
|
256000.0,
|
|
1000000.0
|
|
]
|
|
}
|
|
]
|
|
},
|
|
"dashscope/qwen3-coder-flash-2025-07-28": {
|
|
"litellm_provider": "dashscope",
|
|
"max_input_tokens": 997952,
|
|
"max_output_tokens": 65536,
|
|
"max_tokens": 1000000,
|
|
"mode": "chat",
|
|
"source": "https://www.alibabacloud.com/help/en/model-studio/models",
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true,
|
|
"tiered_pricing": [
|
|
{
|
|
"input_cost_per_token": 3e-07,
|
|
"output_cost_per_token": 1.5e-06,
|
|
"range": [
|
|
0,
|
|
32000.0
|
|
]
|
|
},
|
|
{
|
|
"input_cost_per_token": 5e-07,
|
|
"output_cost_per_token": 2.5e-06,
|
|
"range": [
|
|
32000.0,
|
|
128000.0
|
|
]
|
|
},
|
|
{
|
|
"input_cost_per_token": 8e-07,
|
|
"output_cost_per_token": 4e-06,
|
|
"range": [
|
|
128000.0,
|
|
256000.0
|
|
]
|
|
},
|
|
{
|
|
"input_cost_per_token": 1.6e-06,
|
|
"output_cost_per_token": 9.6e-06,
|
|
"range": [
|
|
256000.0,
|
|
1000000.0
|
|
]
|
|
}
|
|
]
|
|
},
|
|
"dashscope/qwen3-coder-plus": {
|
|
"litellm_provider": "dashscope",
|
|
"max_input_tokens": 997952,
|
|
"max_output_tokens": 65536,
|
|
"max_tokens": 1000000,
|
|
"mode": "chat",
|
|
"source": "https://www.alibabacloud.com/help/en/model-studio/models",
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true,
|
|
"tiered_pricing": [
|
|
{
|
|
"cache_read_input_token_cost": 1e-07,
|
|
"input_cost_per_token": 1e-06,
|
|
"output_cost_per_token": 5e-06,
|
|
"range": [
|
|
0,
|
|
32000.0
|
|
]
|
|
},
|
|
{
|
|
"cache_read_input_token_cost": 1.8e-07,
|
|
"input_cost_per_token": 1.8e-06,
|
|
"output_cost_per_token": 9e-06,
|
|
"range": [
|
|
32000.0,
|
|
128000.0
|
|
]
|
|
},
|
|
{
|
|
"cache_read_input_token_cost": 3e-07,
|
|
"input_cost_per_token": 3e-06,
|
|
"output_cost_per_token": 1.5e-05,
|
|
"range": [
|
|
128000.0,
|
|
256000.0
|
|
]
|
|
},
|
|
{
|
|
"cache_read_input_token_cost": 6e-07,
|
|
"input_cost_per_token": 6e-06,
|
|
"output_cost_per_token": 6e-05,
|
|
"range": [
|
|
256000.0,
|
|
1000000.0
|
|
]
|
|
}
|
|
]
|
|
},
|
|
"dashscope/qwen3-coder-plus-2025-07-22": {
|
|
"litellm_provider": "dashscope",
|
|
"max_input_tokens": 997952,
|
|
"max_output_tokens": 65536,
|
|
"max_tokens": 1000000,
|
|
"mode": "chat",
|
|
"source": "https://www.alibabacloud.com/help/en/model-studio/models",
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true,
|
|
"tiered_pricing": [
|
|
{
|
|
"input_cost_per_token": 1e-06,
|
|
"output_cost_per_token": 5e-06,
|
|
"range": [
|
|
0,
|
|
32000.0
|
|
]
|
|
},
|
|
{
|
|
"input_cost_per_token": 1.8e-06,
|
|
"output_cost_per_token": 9e-06,
|
|
"range": [
|
|
32000.0,
|
|
128000.0
|
|
]
|
|
},
|
|
{
|
|
"input_cost_per_token": 3e-06,
|
|
"output_cost_per_token": 1.5e-05,
|
|
"range": [
|
|
128000.0,
|
|
256000.0
|
|
]
|
|
},
|
|
{
|
|
"input_cost_per_token": 6e-06,
|
|
"output_cost_per_token": 6e-05,
|
|
"range": [
|
|
256000.0,
|
|
1000000.0
|
|
]
|
|
}
|
|
]
|
|
},
|
|
"dashscope/qwen3-max-preview": {
|
|
"litellm_provider": "dashscope",
|
|
"max_input_tokens": 258048,
|
|
"max_output_tokens": 65536,
|
|
"max_tokens": 262144,
|
|
"mode": "chat",
|
|
"source": "https://www.alibabacloud.com/help/en/model-studio/models",
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true,
|
|
"tiered_pricing": [
|
|
{
|
|
"input_cost_per_token": 1.2e-06,
|
|
"output_cost_per_token": 6e-06,
|
|
"range": [
|
|
0,
|
|
32000.0
|
|
]
|
|
},
|
|
{
|
|
"input_cost_per_token": 2.4e-06,
|
|
"output_cost_per_token": 1.2e-05,
|
|
"range": [
|
|
32000.0,
|
|
128000.0
|
|
]
|
|
},
|
|
{
|
|
"input_cost_per_token": 3e-06,
|
|
"output_cost_per_token": 1.5e-05,
|
|
"range": [
|
|
128000.0,
|
|
252000.0
|
|
]
|
|
}
|
|
]
|
|
},
|
|
"dashscope/qwq-plus": {
|
|
"input_cost_per_token": 8e-07,
|
|
"litellm_provider": "dashscope",
|
|
"max_input_tokens": 98304,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.4e-06,
|
|
"source": "https://www.alibabacloud.com/help/en/model-studio/models",
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"databricks/databricks-bge-large-en": {
|
|
"input_cost_per_token": 1.0003e-07,
|
|
"input_dbu_cost_per_token": 1.429e-06,
|
|
"litellm_provider": "databricks",
|
|
"max_input_tokens": 512,
|
|
"max_tokens": 512,
|
|
"metadata": {
|
|
"notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation."
|
|
},
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0,
|
|
"output_dbu_cost_per_token": 0.0,
|
|
"output_vector_size": 1024,
|
|
"source": "https://www.databricks.com/product/pricing/foundation-model-serving"
|
|
},
|
|
"databricks/databricks-claude-3-7-sonnet": {
|
|
"input_cost_per_token": 2.9999900000000002e-06,
|
|
"input_dbu_cost_per_token": 4.2857e-05,
|
|
"litellm_provider": "databricks",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 200000,
|
|
"metadata": {
|
|
"notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation."
|
|
},
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5000020000000002e-05,
|
|
"output_dbu_cost_per_token": 0.000214286,
|
|
"source": "https://www.databricks.com/product/pricing/proprietary-foundation-model-serving",
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"databricks/databricks-claude-haiku-4-5": {
|
|
"input_cost_per_token": 1.00002e-06,
|
|
"input_dbu_cost_per_token": 1.4286e-05,
|
|
"litellm_provider": "databricks",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 200000,
|
|
"metadata": {
|
|
"notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation."
|
|
},
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5.00003e-06,
|
|
"output_dbu_cost_per_token": 7.1429e-05,
|
|
"source": "https://www.databricks.com/product/pricing/proprietary-foundation-model-serving",
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"databricks/databricks-claude-opus-4": {
|
|
"input_cost_per_token": 1.5000020000000002e-05,
|
|
"input_dbu_cost_per_token": 0.000214286,
|
|
"litellm_provider": "databricks",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 32000,
|
|
"max_tokens": 200000,
|
|
"metadata": {
|
|
"notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation."
|
|
},
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.500003000000001e-05,
|
|
"output_dbu_cost_per_token": 0.001071429,
|
|
"source": "https://www.databricks.com/product/pricing/proprietary-foundation-model-serving",
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"databricks/databricks-claude-opus-4-1": {
|
|
"input_cost_per_token": 1.5000020000000002e-05,
|
|
"input_dbu_cost_per_token": 0.000214286,
|
|
"litellm_provider": "databricks",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 32000,
|
|
"max_tokens": 200000,
|
|
"metadata": {
|
|
"notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation."
|
|
},
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.500003000000001e-05,
|
|
"output_dbu_cost_per_token": 0.001071429,
|
|
"source": "https://www.databricks.com/product/pricing/proprietary-foundation-model-serving",
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"databricks/databricks-claude-opus-4-5": {
|
|
"input_cost_per_token": 5.00003e-06,
|
|
"input_dbu_cost_per_token": 7.1429e-05,
|
|
"litellm_provider": "databricks",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 200000,
|
|
"metadata": {
|
|
"notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation."
|
|
},
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.5000010000000002e-05,
|
|
"output_dbu_cost_per_token": 0.000357143,
|
|
"source": "https://www.databricks.com/product/pricing/proprietary-foundation-model-serving",
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"databricks/databricks-claude-sonnet-4": {
|
|
"input_cost_per_token": 2.9999900000000002e-06,
|
|
"input_dbu_cost_per_token": 4.2857e-05,
|
|
"litellm_provider": "databricks",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 200000,
|
|
"metadata": {
|
|
"notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation."
|
|
},
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5000020000000002e-05,
|
|
"output_dbu_cost_per_token": 0.000214286,
|
|
"source": "https://www.databricks.com/product/pricing/proprietary-foundation-model-serving",
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"databricks/databricks-claude-sonnet-4-1": {
|
|
"input_cost_per_token": 2.9999900000000002e-06,
|
|
"input_dbu_cost_per_token": 4.2857e-05,
|
|
"litellm_provider": "databricks",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 200000,
|
|
"metadata": {
|
|
"notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation."
|
|
},
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5000020000000002e-05,
|
|
"output_dbu_cost_per_token": 0.000214286,
|
|
"source": "https://www.databricks.com/product/pricing/proprietary-foundation-model-serving",
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"databricks/databricks-claude-sonnet-4-5": {
|
|
"input_cost_per_token": 2.9999900000000002e-06,
|
|
"input_dbu_cost_per_token": 4.2857e-05,
|
|
"litellm_provider": "databricks",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 200000,
|
|
"metadata": {
|
|
"notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation."
|
|
},
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5000020000000002e-05,
|
|
"output_dbu_cost_per_token": 0.000214286,
|
|
"source": "https://www.databricks.com/product/pricing/proprietary-foundation-model-serving",
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"databricks/databricks-gemini-2-5-flash": {
|
|
"input_cost_per_token": 3.0001999999999996e-07,
|
|
"input_dbu_cost_per_token": 4.285999999999999e-06,
|
|
"litellm_provider": "databricks",
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 65535,
|
|
"max_tokens": 1048576,
|
|
"metadata": {
|
|
"notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation."
|
|
},
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.49998e-06,
|
|
"output_dbu_cost_per_token": 3.5714e-05,
|
|
"source": "https://www.databricks.com/product/pricing/proprietary-foundation-model-serving",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"databricks/databricks-gemini-2-5-pro": {
|
|
"input_cost_per_token": 1.24999e-06,
|
|
"input_dbu_cost_per_token": 1.7857e-05,
|
|
"litellm_provider": "databricks",
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 65536,
|
|
"max_tokens": 1048576,
|
|
"metadata": {
|
|
"notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation."
|
|
},
|
|
"mode": "chat",
|
|
"output_cost_per_token": 9.999990000000002e-06,
|
|
"output_dbu_cost_per_token": 0.000142857,
|
|
"source": "https://www.databricks.com/product/pricing/proprietary-foundation-model-serving",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"databricks/databricks-gemma-3-12b": {
|
|
"input_cost_per_token": 1.5000999999999998e-07,
|
|
"input_dbu_cost_per_token": 2.1429999999999996e-06,
|
|
"litellm_provider": "databricks",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 32000,
|
|
"max_tokens": 128000,
|
|
"metadata": {
|
|
"notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation."
|
|
},
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5.0001e-07,
|
|
"output_dbu_cost_per_token": 7.143e-06,
|
|
"source": "https://www.databricks.com/product/pricing/foundation-model-serving"
|
|
},
|
|
"databricks/databricks-gpt-5": {
|
|
"input_cost_per_token": 1.24999e-06,
|
|
"input_dbu_cost_per_token": 1.7857e-05,
|
|
"litellm_provider": "databricks",
|
|
"max_input_tokens": 400000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 400000,
|
|
"metadata": {
|
|
"notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation."
|
|
},
|
|
"mode": "chat",
|
|
"output_cost_per_token": 9.999990000000002e-06,
|
|
"output_dbu_cost_per_token": 0.000142857,
|
|
"source": "https://www.databricks.com/product/pricing/proprietary-foundation-model-serving"
|
|
},
|
|
"databricks/databricks-gpt-5-1": {
|
|
"input_cost_per_token": 1.24999e-06,
|
|
"input_dbu_cost_per_token": 1.7857e-05,
|
|
"litellm_provider": "databricks",
|
|
"max_input_tokens": 400000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 400000,
|
|
"metadata": {
|
|
"notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation."
|
|
},
|
|
"mode": "chat",
|
|
"output_cost_per_token": 9.999990000000002e-06,
|
|
"output_dbu_cost_per_token": 0.000142857,
|
|
"source": "https://www.databricks.com/product/pricing/proprietary-foundation-model-serving"
|
|
},
|
|
"databricks/databricks-gpt-5-mini": {
|
|
"input_cost_per_token": 2.4997000000000006e-07,
|
|
"input_dbu_cost_per_token": 3.571e-06,
|
|
"litellm_provider": "databricks",
|
|
"max_input_tokens": 400000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 400000,
|
|
"metadata": {
|
|
"notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation."
|
|
},
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.9999700000000004e-06,
|
|
"output_dbu_cost_per_token": 2.8571e-05,
|
|
"source": "https://www.databricks.com/product/pricing/proprietary-foundation-model-serving"
|
|
},
|
|
"databricks/databricks-gpt-5-nano": {
|
|
"input_cost_per_token": 4.998e-08,
|
|
"input_dbu_cost_per_token": 7.14e-07,
|
|
"litellm_provider": "databricks",
|
|
"max_input_tokens": 400000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 400000,
|
|
"metadata": {
|
|
"notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation."
|
|
},
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3.9998000000000007e-07,
|
|
"output_dbu_cost_per_token": 5.714000000000001e-06,
|
|
"source": "https://www.databricks.com/product/pricing/proprietary-foundation-model-serving"
|
|
},
|
|
"databricks/databricks-gpt-oss-120b": {
|
|
"input_cost_per_token": 1.5000999999999998e-07,
|
|
"input_dbu_cost_per_token": 2.1429999999999996e-06,
|
|
"litellm_provider": "databricks",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"metadata": {
|
|
"notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation."
|
|
},
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5.9997e-07,
|
|
"output_dbu_cost_per_token": 8.571e-06,
|
|
"source": "https://www.databricks.com/product/pricing/foundation-model-serving"
|
|
},
|
|
"databricks/databricks-gpt-oss-20b": {
|
|
"input_cost_per_token": 7e-08,
|
|
"input_dbu_cost_per_token": 1e-06,
|
|
"litellm_provider": "databricks",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"metadata": {
|
|
"notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation."
|
|
},
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3.0001999999999996e-07,
|
|
"output_dbu_cost_per_token": 4.285999999999999e-06,
|
|
"source": "https://www.databricks.com/product/pricing/foundation-model-serving"
|
|
},
|
|
"databricks/databricks-gte-large-en": {
|
|
"input_cost_per_token": 1.2999000000000001e-07,
|
|
"input_dbu_cost_per_token": 1.857e-06,
|
|
"litellm_provider": "databricks",
|
|
"max_input_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"metadata": {
|
|
"notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation."
|
|
},
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0,
|
|
"output_dbu_cost_per_token": 0.0,
|
|
"output_vector_size": 1024,
|
|
"source": "https://www.databricks.com/product/pricing/foundation-model-serving"
|
|
},
|
|
"databricks/databricks-llama-2-70b-chat": {
|
|
"input_cost_per_token": 5.0001e-07,
|
|
"input_dbu_cost_per_token": 7.143e-06,
|
|
"litellm_provider": "databricks",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"metadata": {
|
|
"notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation."
|
|
},
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5000300000000002e-06,
|
|
"output_dbu_cost_per_token": 2.1429e-05,
|
|
"source": "https://www.databricks.com/product/pricing/foundation-model-serving",
|
|
"supports_tool_choice": true
|
|
},
|
|
"databricks/databricks-llama-4-maverick": {
|
|
"input_cost_per_token": 5.0001e-07,
|
|
"input_dbu_cost_per_token": 7.143e-06,
|
|
"litellm_provider": "databricks",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"metadata": {
|
|
"notes": "Databricks documentation now provides both DBU costs (_dbu_cost_per_token) and dollar costs(_cost_per_token)."
|
|
},
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5000300000000002e-06,
|
|
"output_dbu_cost_per_token": 2.1429e-05,
|
|
"source": "https://www.databricks.com/product/pricing/foundation-model-serving",
|
|
"supports_tool_choice": true
|
|
},
|
|
"databricks/databricks-meta-llama-3-1-405b-instruct": {
|
|
"input_cost_per_token": 5.00003e-06,
|
|
"input_dbu_cost_per_token": 7.1429e-05,
|
|
"litellm_provider": "databricks",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"metadata": {
|
|
"notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation."
|
|
},
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5000020000000002e-05,
|
|
"output_dbu_cost_per_token": 0.000214286,
|
|
"source": "https://www.databricks.com/product/pricing/foundation-model-serving",
|
|
"supports_tool_choice": true
|
|
},
|
|
"databricks/databricks-meta-llama-3-1-8b-instruct": {
|
|
"input_cost_per_token": 1.5000999999999998e-07,
|
|
"input_dbu_cost_per_token": 2.1429999999999996e-06,
|
|
"litellm_provider": "databricks",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 200000,
|
|
"metadata": {
|
|
"notes": "Input/output cost per token is dbu cost * $0.070. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation."
|
|
},
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4.5003000000000007e-07,
|
|
"output_dbu_cost_per_token": 6.429000000000001e-06,
|
|
"source": "https://www.databricks.com/product/pricing/foundation-model-serving"
|
|
},
|
|
"databricks/databricks-meta-llama-3-3-70b-instruct": {
|
|
"input_cost_per_token": 5.0001e-07,
|
|
"input_dbu_cost_per_token": 7.143e-06,
|
|
"litellm_provider": "databricks",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"metadata": {
|
|
"notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation."
|
|
},
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5000300000000002e-06,
|
|
"output_dbu_cost_per_token": 2.1429e-05,
|
|
"source": "https://www.databricks.com/product/pricing/foundation-model-serving",
|
|
"supports_tool_choice": true
|
|
},
|
|
"databricks/databricks-meta-llama-3-70b-instruct": {
|
|
"input_cost_per_token": 1.00002e-06,
|
|
"input_dbu_cost_per_token": 1.4286e-05,
|
|
"litellm_provider": "databricks",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"metadata": {
|
|
"notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation."
|
|
},
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.9999900000000002e-06,
|
|
"output_dbu_cost_per_token": 4.2857e-05,
|
|
"source": "https://www.databricks.com/product/pricing/foundation-model-serving",
|
|
"supports_tool_choice": true
|
|
},
|
|
"databricks/databricks-mixtral-8x7b-instruct": {
|
|
"input_cost_per_token": 5.0001e-07,
|
|
"input_dbu_cost_per_token": 7.143e-06,
|
|
"litellm_provider": "databricks",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"metadata": {
|
|
"notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation."
|
|
},
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.00002e-06,
|
|
"output_dbu_cost_per_token": 1.4286e-05,
|
|
"source": "https://www.databricks.com/product/pricing/foundation-model-serving",
|
|
"supports_tool_choice": true
|
|
},
|
|
"databricks/databricks-mpt-30b-instruct": {
|
|
"input_cost_per_token": 1.00002e-06,
|
|
"input_dbu_cost_per_token": 1.4286e-05,
|
|
"litellm_provider": "databricks",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"metadata": {
|
|
"notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation."
|
|
},
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.00002e-06,
|
|
"output_dbu_cost_per_token": 1.4286e-05,
|
|
"source": "https://www.databricks.com/product/pricing/foundation-model-serving",
|
|
"supports_tool_choice": true
|
|
},
|
|
"databricks/databricks-mpt-7b-instruct": {
|
|
"input_cost_per_token": 5.0001e-07,
|
|
"input_dbu_cost_per_token": 7.143e-06,
|
|
"litellm_provider": "databricks",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"metadata": {
|
|
"notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation."
|
|
},
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0,
|
|
"output_dbu_cost_per_token": 0.0,
|
|
"source": "https://www.databricks.com/product/pricing/foundation-model-serving",
|
|
"supports_tool_choice": true
|
|
},
|
|
"dataforseo/search": {
|
|
"input_cost_per_query": 0.003,
|
|
"litellm_provider": "dataforseo",
|
|
"mode": "search"
|
|
},
|
|
"davinci-002": {
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "text-completion-openai",
|
|
"max_input_tokens": 16384,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 16384,
|
|
"mode": "completion",
|
|
"output_cost_per_token": 2e-06
|
|
},
|
|
"deepgram/base": {
|
|
"input_cost_per_second": 0.00020833,
|
|
"litellm_provider": "deepgram",
|
|
"metadata": {
|
|
"calculation": "$0.0125/60 seconds = $0.00020833 per second",
|
|
"original_pricing_per_minute": 0.0125
|
|
},
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_second": 0.0,
|
|
"source": "https://deepgram.com/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/audio/transcriptions"
|
|
]
|
|
},
|
|
"deepgram/base-conversationalai": {
|
|
"input_cost_per_second": 0.00020833,
|
|
"litellm_provider": "deepgram",
|
|
"metadata": {
|
|
"calculation": "$0.0125/60 seconds = $0.00020833 per second",
|
|
"original_pricing_per_minute": 0.0125
|
|
},
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_second": 0.0,
|
|
"source": "https://deepgram.com/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/audio/transcriptions"
|
|
]
|
|
},
|
|
"deepgram/base-finance": {
|
|
"input_cost_per_second": 0.00020833,
|
|
"litellm_provider": "deepgram",
|
|
"metadata": {
|
|
"calculation": "$0.0125/60 seconds = $0.00020833 per second",
|
|
"original_pricing_per_minute": 0.0125
|
|
},
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_second": 0.0,
|
|
"source": "https://deepgram.com/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/audio/transcriptions"
|
|
]
|
|
},
|
|
"deepgram/base-general": {
|
|
"input_cost_per_second": 0.00020833,
|
|
"litellm_provider": "deepgram",
|
|
"metadata": {
|
|
"calculation": "$0.0125/60 seconds = $0.00020833 per second",
|
|
"original_pricing_per_minute": 0.0125
|
|
},
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_second": 0.0,
|
|
"source": "https://deepgram.com/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/audio/transcriptions"
|
|
]
|
|
},
|
|
"deepgram/base-meeting": {
|
|
"input_cost_per_second": 0.00020833,
|
|
"litellm_provider": "deepgram",
|
|
"metadata": {
|
|
"calculation": "$0.0125/60 seconds = $0.00020833 per second",
|
|
"original_pricing_per_minute": 0.0125
|
|
},
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_second": 0.0,
|
|
"source": "https://deepgram.com/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/audio/transcriptions"
|
|
]
|
|
},
|
|
"deepgram/base-phonecall": {
|
|
"input_cost_per_second": 0.00020833,
|
|
"litellm_provider": "deepgram",
|
|
"metadata": {
|
|
"calculation": "$0.0125/60 seconds = $0.00020833 per second",
|
|
"original_pricing_per_minute": 0.0125
|
|
},
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_second": 0.0,
|
|
"source": "https://deepgram.com/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/audio/transcriptions"
|
|
]
|
|
},
|
|
"deepgram/base-video": {
|
|
"input_cost_per_second": 0.00020833,
|
|
"litellm_provider": "deepgram",
|
|
"metadata": {
|
|
"calculation": "$0.0125/60 seconds = $0.00020833 per second",
|
|
"original_pricing_per_minute": 0.0125
|
|
},
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_second": 0.0,
|
|
"source": "https://deepgram.com/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/audio/transcriptions"
|
|
]
|
|
},
|
|
"deepgram/base-voicemail": {
|
|
"input_cost_per_second": 0.00020833,
|
|
"litellm_provider": "deepgram",
|
|
"metadata": {
|
|
"calculation": "$0.0125/60 seconds = $0.00020833 per second",
|
|
"original_pricing_per_minute": 0.0125
|
|
},
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_second": 0.0,
|
|
"source": "https://deepgram.com/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/audio/transcriptions"
|
|
]
|
|
},
|
|
"deepgram/enhanced": {
|
|
"input_cost_per_second": 0.00024167,
|
|
"litellm_provider": "deepgram",
|
|
"metadata": {
|
|
"calculation": "$0.0145/60 seconds = $0.00024167 per second",
|
|
"original_pricing_per_minute": 0.0145
|
|
},
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_second": 0.0,
|
|
"source": "https://deepgram.com/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/audio/transcriptions"
|
|
]
|
|
},
|
|
"deepgram/enhanced-finance": {
|
|
"input_cost_per_second": 0.00024167,
|
|
"litellm_provider": "deepgram",
|
|
"metadata": {
|
|
"calculation": "$0.0145/60 seconds = $0.00024167 per second",
|
|
"original_pricing_per_minute": 0.0145
|
|
},
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_second": 0.0,
|
|
"source": "https://deepgram.com/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/audio/transcriptions"
|
|
]
|
|
},
|
|
"deepgram/enhanced-general": {
|
|
"input_cost_per_second": 0.00024167,
|
|
"litellm_provider": "deepgram",
|
|
"metadata": {
|
|
"calculation": "$0.0145/60 seconds = $0.00024167 per second",
|
|
"original_pricing_per_minute": 0.0145
|
|
},
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_second": 0.0,
|
|
"source": "https://deepgram.com/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/audio/transcriptions"
|
|
]
|
|
},
|
|
"deepgram/enhanced-meeting": {
|
|
"input_cost_per_second": 0.00024167,
|
|
"litellm_provider": "deepgram",
|
|
"metadata": {
|
|
"calculation": "$0.0145/60 seconds = $0.00024167 per second",
|
|
"original_pricing_per_minute": 0.0145
|
|
},
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_second": 0.0,
|
|
"source": "https://deepgram.com/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/audio/transcriptions"
|
|
]
|
|
},
|
|
"deepgram/enhanced-phonecall": {
|
|
"input_cost_per_second": 0.00024167,
|
|
"litellm_provider": "deepgram",
|
|
"metadata": {
|
|
"calculation": "$0.0145/60 seconds = $0.00024167 per second",
|
|
"original_pricing_per_minute": 0.0145
|
|
},
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_second": 0.0,
|
|
"source": "https://deepgram.com/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/audio/transcriptions"
|
|
]
|
|
},
|
|
"deepgram/nova": {
|
|
"input_cost_per_second": 7.167e-05,
|
|
"litellm_provider": "deepgram",
|
|
"metadata": {
|
|
"calculation": "$0.0043/60 seconds = $0.00007167 per second",
|
|
"original_pricing_per_minute": 0.0043
|
|
},
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_second": 0.0,
|
|
"source": "https://deepgram.com/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/audio/transcriptions"
|
|
]
|
|
},
|
|
"deepgram/nova-2": {
|
|
"input_cost_per_second": 7.167e-05,
|
|
"litellm_provider": "deepgram",
|
|
"metadata": {
|
|
"calculation": "$0.0043/60 seconds = $0.00007167 per second",
|
|
"original_pricing_per_minute": 0.0043
|
|
},
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_second": 0.0,
|
|
"source": "https://deepgram.com/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/audio/transcriptions"
|
|
]
|
|
},
|
|
"deepgram/nova-2-atc": {
|
|
"input_cost_per_second": 7.167e-05,
|
|
"litellm_provider": "deepgram",
|
|
"metadata": {
|
|
"calculation": "$0.0043/60 seconds = $0.00007167 per second",
|
|
"original_pricing_per_minute": 0.0043
|
|
},
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_second": 0.0,
|
|
"source": "https://deepgram.com/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/audio/transcriptions"
|
|
]
|
|
},
|
|
"deepgram/nova-2-automotive": {
|
|
"input_cost_per_second": 7.167e-05,
|
|
"litellm_provider": "deepgram",
|
|
"metadata": {
|
|
"calculation": "$0.0043/60 seconds = $0.00007167 per second",
|
|
"original_pricing_per_minute": 0.0043
|
|
},
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_second": 0.0,
|
|
"source": "https://deepgram.com/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/audio/transcriptions"
|
|
]
|
|
},
|
|
"deepgram/nova-2-conversationalai": {
|
|
"input_cost_per_second": 7.167e-05,
|
|
"litellm_provider": "deepgram",
|
|
"metadata": {
|
|
"calculation": "$0.0043/60 seconds = $0.00007167 per second",
|
|
"original_pricing_per_minute": 0.0043
|
|
},
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_second": 0.0,
|
|
"source": "https://deepgram.com/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/audio/transcriptions"
|
|
]
|
|
},
|
|
"deepgram/nova-2-drivethru": {
|
|
"input_cost_per_second": 7.167e-05,
|
|
"litellm_provider": "deepgram",
|
|
"metadata": {
|
|
"calculation": "$0.0043/60 seconds = $0.00007167 per second",
|
|
"original_pricing_per_minute": 0.0043
|
|
},
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_second": 0.0,
|
|
"source": "https://deepgram.com/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/audio/transcriptions"
|
|
]
|
|
},
|
|
"deepgram/nova-2-finance": {
|
|
"input_cost_per_second": 7.167e-05,
|
|
"litellm_provider": "deepgram",
|
|
"metadata": {
|
|
"calculation": "$0.0043/60 seconds = $0.00007167 per second",
|
|
"original_pricing_per_minute": 0.0043
|
|
},
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_second": 0.0,
|
|
"source": "https://deepgram.com/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/audio/transcriptions"
|
|
]
|
|
},
|
|
"deepgram/nova-2-general": {
|
|
"input_cost_per_second": 7.167e-05,
|
|
"litellm_provider": "deepgram",
|
|
"metadata": {
|
|
"calculation": "$0.0043/60 seconds = $0.00007167 per second",
|
|
"original_pricing_per_minute": 0.0043
|
|
},
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_second": 0.0,
|
|
"source": "https://deepgram.com/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/audio/transcriptions"
|
|
]
|
|
},
|
|
"deepgram/nova-2-meeting": {
|
|
"input_cost_per_second": 7.167e-05,
|
|
"litellm_provider": "deepgram",
|
|
"metadata": {
|
|
"calculation": "$0.0043/60 seconds = $0.00007167 per second",
|
|
"original_pricing_per_minute": 0.0043
|
|
},
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_second": 0.0,
|
|
"source": "https://deepgram.com/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/audio/transcriptions"
|
|
]
|
|
},
|
|
"deepgram/nova-2-phonecall": {
|
|
"input_cost_per_second": 7.167e-05,
|
|
"litellm_provider": "deepgram",
|
|
"metadata": {
|
|
"calculation": "$0.0043/60 seconds = $0.00007167 per second",
|
|
"original_pricing_per_minute": 0.0043
|
|
},
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_second": 0.0,
|
|
"source": "https://deepgram.com/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/audio/transcriptions"
|
|
]
|
|
},
|
|
"deepgram/nova-2-video": {
|
|
"input_cost_per_second": 7.167e-05,
|
|
"litellm_provider": "deepgram",
|
|
"metadata": {
|
|
"calculation": "$0.0043/60 seconds = $0.00007167 per second",
|
|
"original_pricing_per_minute": 0.0043
|
|
},
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_second": 0.0,
|
|
"source": "https://deepgram.com/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/audio/transcriptions"
|
|
]
|
|
},
|
|
"deepgram/nova-2-voicemail": {
|
|
"input_cost_per_second": 7.167e-05,
|
|
"litellm_provider": "deepgram",
|
|
"metadata": {
|
|
"calculation": "$0.0043/60 seconds = $0.00007167 per second",
|
|
"original_pricing_per_minute": 0.0043
|
|
},
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_second": 0.0,
|
|
"source": "https://deepgram.com/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/audio/transcriptions"
|
|
]
|
|
},
|
|
"deepgram/nova-3": {
|
|
"input_cost_per_second": 7.167e-05,
|
|
"litellm_provider": "deepgram",
|
|
"metadata": {
|
|
"calculation": "$0.0043/60 seconds = $0.00007167 per second",
|
|
"original_pricing_per_minute": 0.0043
|
|
},
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_second": 0.0,
|
|
"source": "https://deepgram.com/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/audio/transcriptions"
|
|
]
|
|
},
|
|
"deepgram/nova-3-general": {
|
|
"input_cost_per_second": 7.167e-05,
|
|
"litellm_provider": "deepgram",
|
|
"metadata": {
|
|
"calculation": "$0.0043/60 seconds = $0.00007167 per second",
|
|
"original_pricing_per_minute": 0.0043
|
|
},
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_second": 0.0,
|
|
"source": "https://deepgram.com/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/audio/transcriptions"
|
|
]
|
|
},
|
|
"deepgram/nova-3-medical": {
|
|
"input_cost_per_second": 8.667e-05,
|
|
"litellm_provider": "deepgram",
|
|
"metadata": {
|
|
"calculation": "$0.0052/60 seconds = $0.00008667 per second (multilingual)",
|
|
"original_pricing_per_minute": 0.0052
|
|
},
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_second": 0.0,
|
|
"source": "https://deepgram.com/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/audio/transcriptions"
|
|
]
|
|
},
|
|
"deepgram/nova-general": {
|
|
"input_cost_per_second": 7.167e-05,
|
|
"litellm_provider": "deepgram",
|
|
"metadata": {
|
|
"calculation": "$0.0043/60 seconds = $0.00007167 per second",
|
|
"original_pricing_per_minute": 0.0043
|
|
},
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_second": 0.0,
|
|
"source": "https://deepgram.com/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/audio/transcriptions"
|
|
]
|
|
},
|
|
"deepgram/nova-phonecall": {
|
|
"input_cost_per_second": 7.167e-05,
|
|
"litellm_provider": "deepgram",
|
|
"metadata": {
|
|
"calculation": "$0.0043/60 seconds = $0.00007167 per second",
|
|
"original_pricing_per_minute": 0.0043
|
|
},
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_second": 0.0,
|
|
"source": "https://deepgram.com/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/audio/transcriptions"
|
|
]
|
|
},
|
|
"deepgram/whisper": {
|
|
"input_cost_per_second": 0.0001,
|
|
"litellm_provider": "deepgram",
|
|
"metadata": {
|
|
"notes": "Deepgram's hosted OpenAI Whisper models - pricing may differ from native Deepgram models"
|
|
},
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_second": 0.0,
|
|
"source": "https://deepgram.com/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/audio/transcriptions"
|
|
]
|
|
},
|
|
"deepgram/whisper-base": {
|
|
"input_cost_per_second": 0.0001,
|
|
"litellm_provider": "deepgram",
|
|
"metadata": {
|
|
"notes": "Deepgram's hosted OpenAI Whisper models - pricing may differ from native Deepgram models"
|
|
},
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_second": 0.0,
|
|
"source": "https://deepgram.com/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/audio/transcriptions"
|
|
]
|
|
},
|
|
"deepgram/whisper-large": {
|
|
"input_cost_per_second": 0.0001,
|
|
"litellm_provider": "deepgram",
|
|
"metadata": {
|
|
"notes": "Deepgram's hosted OpenAI Whisper models - pricing may differ from native Deepgram models"
|
|
},
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_second": 0.0,
|
|
"source": "https://deepgram.com/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/audio/transcriptions"
|
|
]
|
|
},
|
|
"deepgram/whisper-medium": {
|
|
"input_cost_per_second": 0.0001,
|
|
"litellm_provider": "deepgram",
|
|
"metadata": {
|
|
"notes": "Deepgram's hosted OpenAI Whisper models - pricing may differ from native Deepgram models"
|
|
},
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_second": 0.0,
|
|
"source": "https://deepgram.com/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/audio/transcriptions"
|
|
]
|
|
},
|
|
"deepgram/whisper-small": {
|
|
"input_cost_per_second": 0.0001,
|
|
"litellm_provider": "deepgram",
|
|
"metadata": {
|
|
"notes": "Deepgram's hosted OpenAI Whisper models - pricing may differ from native Deepgram models"
|
|
},
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_second": 0.0,
|
|
"source": "https://deepgram.com/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/audio/transcriptions"
|
|
]
|
|
},
|
|
"deepgram/whisper-tiny": {
|
|
"input_cost_per_second": 0.0001,
|
|
"litellm_provider": "deepgram",
|
|
"metadata": {
|
|
"notes": "Deepgram's hosted OpenAI Whisper models - pricing may differ from native Deepgram models"
|
|
},
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_second": 0.0,
|
|
"source": "https://deepgram.com/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/audio/transcriptions"
|
|
]
|
|
},
|
|
"deepinfra/Gryphe/MythoMax-L2-13b": {
|
|
"max_tokens": 4096,
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"input_cost_per_token": 8e-08,
|
|
"output_cost_per_token": 9e-08,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/NousResearch/Hermes-3-Llama-3.1-405B": {
|
|
"max_tokens": 131072,
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"input_cost_per_token": 1e-06,
|
|
"output_cost_per_token": 1e-06,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/NousResearch/Hermes-3-Llama-3.1-70B": {
|
|
"max_tokens": 131072,
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"input_cost_per_token": 3e-07,
|
|
"output_cost_per_token": 3e-07,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": false
|
|
},
|
|
"deepinfra/Qwen/QwQ-32B": {
|
|
"max_tokens": 131072,
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"input_cost_per_token": 1.5e-07,
|
|
"output_cost_per_token": 4e-07,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/Qwen/Qwen2.5-72B-Instruct": {
|
|
"max_tokens": 32768,
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 32768,
|
|
"input_cost_per_token": 1.2e-07,
|
|
"output_cost_per_token": 3.9e-07,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/Qwen/Qwen2.5-7B-Instruct": {
|
|
"max_tokens": 32768,
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 32768,
|
|
"input_cost_per_token": 4e-08,
|
|
"output_cost_per_token": 1e-07,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": false
|
|
},
|
|
"deepinfra/Qwen/Qwen2.5-VL-32B-Instruct": {
|
|
"max_tokens": 128000,
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"input_cost_per_token": 2e-07,
|
|
"output_cost_per_token": 6e-07,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"deepinfra/Qwen/Qwen3-14B": {
|
|
"max_tokens": 40960,
|
|
"max_input_tokens": 40960,
|
|
"max_output_tokens": 40960,
|
|
"input_cost_per_token": 6e-08,
|
|
"output_cost_per_token": 2.4e-07,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/Qwen/Qwen3-235B-A22B": {
|
|
"max_tokens": 40960,
|
|
"max_input_tokens": 40960,
|
|
"max_output_tokens": 40960,
|
|
"input_cost_per_token": 1.8e-07,
|
|
"output_cost_per_token": 5.4e-07,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/Qwen/Qwen3-235B-A22B-Instruct-2507": {
|
|
"max_tokens": 262144,
|
|
"max_input_tokens": 262144,
|
|
"max_output_tokens": 262144,
|
|
"input_cost_per_token": 9e-08,
|
|
"output_cost_per_token": 6e-07,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/Qwen/Qwen3-235B-A22B-Thinking-2507": {
|
|
"max_tokens": 262144,
|
|
"max_input_tokens": 262144,
|
|
"max_output_tokens": 262144,
|
|
"input_cost_per_token": 3e-07,
|
|
"output_cost_per_token": 2.9e-06,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/Qwen/Qwen3-30B-A3B": {
|
|
"max_tokens": 40960,
|
|
"max_input_tokens": 40960,
|
|
"max_output_tokens": 40960,
|
|
"input_cost_per_token": 8e-08,
|
|
"output_cost_per_token": 2.9e-07,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/Qwen/Qwen3-32B": {
|
|
"max_tokens": 40960,
|
|
"max_input_tokens": 40960,
|
|
"max_output_tokens": 40960,
|
|
"input_cost_per_token": 1e-07,
|
|
"output_cost_per_token": 2.8e-07,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/Qwen/Qwen3-Coder-480B-A35B-Instruct": {
|
|
"max_tokens": 262144,
|
|
"max_input_tokens": 262144,
|
|
"max_output_tokens": 262144,
|
|
"input_cost_per_token": 4e-07,
|
|
"output_cost_per_token": 1.6e-06,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/Qwen/Qwen3-Coder-480B-A35B-Instruct-Turbo": {
|
|
"max_tokens": 262144,
|
|
"max_input_tokens": 262144,
|
|
"max_output_tokens": 262144,
|
|
"input_cost_per_token": 2.9e-07,
|
|
"output_cost_per_token": 1.2e-06,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/Qwen/Qwen3-Next-80B-A3B-Instruct": {
|
|
"max_tokens": 262144,
|
|
"max_input_tokens": 262144,
|
|
"max_output_tokens": 262144,
|
|
"input_cost_per_token": 1.4e-07,
|
|
"output_cost_per_token": 1.4e-06,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/Qwen/Qwen3-Next-80B-A3B-Thinking": {
|
|
"max_tokens": 262144,
|
|
"max_input_tokens": 262144,
|
|
"max_output_tokens": 262144,
|
|
"input_cost_per_token": 1.4e-07,
|
|
"output_cost_per_token": 1.4e-06,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/Sao10K/L3-8B-Lunaris-v1-Turbo": {
|
|
"max_tokens": 8192,
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"input_cost_per_token": 4e-08,
|
|
"output_cost_per_token": 5e-08,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": false
|
|
},
|
|
"deepinfra/Sao10K/L3.1-70B-Euryale-v2.2": {
|
|
"max_tokens": 131072,
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"input_cost_per_token": 6.5e-07,
|
|
"output_cost_per_token": 7.5e-07,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": false
|
|
},
|
|
"deepinfra/Sao10K/L3.3-70B-Euryale-v2.3": {
|
|
"max_tokens": 131072,
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"input_cost_per_token": 6.5e-07,
|
|
"output_cost_per_token": 7.5e-07,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": false
|
|
},
|
|
"deepinfra/allenai/olmOCR-7B-0725-FP8": {
|
|
"max_tokens": 16384,
|
|
"max_input_tokens": 16384,
|
|
"max_output_tokens": 16384,
|
|
"input_cost_per_token": 2.7e-07,
|
|
"output_cost_per_token": 1.5e-06,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": false
|
|
},
|
|
"deepinfra/anthropic/claude-3-7-sonnet-latest": {
|
|
"max_tokens": 200000,
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 200000,
|
|
"input_cost_per_token": 3.3e-06,
|
|
"output_cost_per_token": 1.65e-05,
|
|
"cache_read_input_token_cost": 3.3e-07,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/anthropic/claude-4-opus": {
|
|
"max_tokens": 200000,
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 200000,
|
|
"input_cost_per_token": 1.65e-05,
|
|
"output_cost_per_token": 8.25e-05,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/anthropic/claude-4-sonnet": {
|
|
"max_tokens": 200000,
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 200000,
|
|
"input_cost_per_token": 3.3e-06,
|
|
"output_cost_per_token": 1.65e-05,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/deepseek-ai/DeepSeek-R1": {
|
|
"max_tokens": 163840,
|
|
"max_input_tokens": 163840,
|
|
"max_output_tokens": 163840,
|
|
"input_cost_per_token": 7e-07,
|
|
"output_cost_per_token": 2.4e-06,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/deepseek-ai/DeepSeek-R1-0528": {
|
|
"max_tokens": 163840,
|
|
"max_input_tokens": 163840,
|
|
"max_output_tokens": 163840,
|
|
"input_cost_per_token": 5e-07,
|
|
"output_cost_per_token": 2.15e-06,
|
|
"cache_read_input_token_cost": 4e-07,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/deepseek-ai/DeepSeek-R1-0528-Turbo": {
|
|
"max_tokens": 32768,
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 32768,
|
|
"input_cost_per_token": 1e-06,
|
|
"output_cost_per_token": 3e-06,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/deepseek-ai/DeepSeek-R1-Distill-Llama-70B": {
|
|
"max_tokens": 131072,
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"input_cost_per_token": 2e-07,
|
|
"output_cost_per_token": 6e-07,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": false
|
|
},
|
|
"deepinfra/deepseek-ai/DeepSeek-R1-Distill-Qwen-32B": {
|
|
"max_tokens": 131072,
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"input_cost_per_token": 2.7e-07,
|
|
"output_cost_per_token": 2.7e-07,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/deepseek-ai/DeepSeek-R1-Turbo": {
|
|
"max_tokens": 40960,
|
|
"max_input_tokens": 40960,
|
|
"max_output_tokens": 40960,
|
|
"input_cost_per_token": 1e-06,
|
|
"output_cost_per_token": 3e-06,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/deepseek-ai/DeepSeek-V3": {
|
|
"max_tokens": 163840,
|
|
"max_input_tokens": 163840,
|
|
"max_output_tokens": 163840,
|
|
"input_cost_per_token": 3.8e-07,
|
|
"output_cost_per_token": 8.9e-07,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/deepseek-ai/DeepSeek-V3-0324": {
|
|
"max_tokens": 163840,
|
|
"max_input_tokens": 163840,
|
|
"max_output_tokens": 163840,
|
|
"input_cost_per_token": 2.5e-07,
|
|
"output_cost_per_token": 8.8e-07,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/deepseek-ai/DeepSeek-V3.1": {
|
|
"max_tokens": 163840,
|
|
"max_input_tokens": 163840,
|
|
"max_output_tokens": 163840,
|
|
"input_cost_per_token": 2.7e-07,
|
|
"output_cost_per_token": 1e-06,
|
|
"cache_read_input_token_cost": 2.16e-07,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true,
|
|
"supports_reasoning": true
|
|
},
|
|
"deepinfra/deepseek-ai/DeepSeek-V3.1-Terminus": {
|
|
"max_tokens": 163840,
|
|
"max_input_tokens": 163840,
|
|
"max_output_tokens": 163840,
|
|
"input_cost_per_token": 2.7e-07,
|
|
"output_cost_per_token": 1e-06,
|
|
"cache_read_input_token_cost": 2.16e-07,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/google/gemini-2.0-flash-001": {
|
|
"max_tokens": 1000000,
|
|
"max_input_tokens": 1000000,
|
|
"max_output_tokens": 1000000,
|
|
"input_cost_per_token": 1e-07,
|
|
"output_cost_per_token": 4e-07,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/google/gemini-2.5-flash": {
|
|
"max_tokens": 1000000,
|
|
"max_input_tokens": 1000000,
|
|
"max_output_tokens": 1000000,
|
|
"input_cost_per_token": 3e-07,
|
|
"output_cost_per_token": 2.5e-06,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/google/gemini-2.5-pro": {
|
|
"max_tokens": 1000000,
|
|
"max_input_tokens": 1000000,
|
|
"max_output_tokens": 1000000,
|
|
"input_cost_per_token": 1.25e-06,
|
|
"output_cost_per_token": 1e-05,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/google/gemma-3-12b-it": {
|
|
"max_tokens": 131072,
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"input_cost_per_token": 5e-08,
|
|
"output_cost_per_token": 1e-07,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/google/gemma-3-27b-it": {
|
|
"max_tokens": 131072,
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"input_cost_per_token": 9e-08,
|
|
"output_cost_per_token": 1.6e-07,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/google/gemma-3-4b-it": {
|
|
"max_tokens": 131072,
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"input_cost_per_token": 4e-08,
|
|
"output_cost_per_token": 8e-08,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/meta-llama/Llama-3.2-11B-Vision-Instruct": {
|
|
"max_tokens": 131072,
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"input_cost_per_token": 4.9e-08,
|
|
"output_cost_per_token": 4.9e-08,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": false
|
|
},
|
|
"deepinfra/meta-llama/Llama-3.2-3B-Instruct": {
|
|
"max_tokens": 131072,
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"input_cost_per_token": 2e-08,
|
|
"output_cost_per_token": 2e-08,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/meta-llama/Llama-3.3-70B-Instruct": {
|
|
"max_tokens": 131072,
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"input_cost_per_token": 2.3e-07,
|
|
"output_cost_per_token": 4e-07,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/meta-llama/Llama-3.3-70B-Instruct-Turbo": {
|
|
"max_tokens": 131072,
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"input_cost_per_token": 1.3e-07,
|
|
"output_cost_per_token": 3.9e-07,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8": {
|
|
"max_tokens": 1048576,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 1048576,
|
|
"input_cost_per_token": 1.5e-07,
|
|
"output_cost_per_token": 6e-07,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/meta-llama/Llama-4-Scout-17B-16E-Instruct": {
|
|
"max_tokens": 327680,
|
|
"max_input_tokens": 327680,
|
|
"max_output_tokens": 327680,
|
|
"input_cost_per_token": 8e-08,
|
|
"output_cost_per_token": 3e-07,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/meta-llama/Llama-Guard-3-8B": {
|
|
"max_tokens": 131072,
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"input_cost_per_token": 5.5e-08,
|
|
"output_cost_per_token": 5.5e-08,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": false
|
|
},
|
|
"deepinfra/meta-llama/Llama-Guard-4-12B": {
|
|
"max_tokens": 163840,
|
|
"max_input_tokens": 163840,
|
|
"max_output_tokens": 163840,
|
|
"input_cost_per_token": 1.8e-07,
|
|
"output_cost_per_token": 1.8e-07,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": false
|
|
},
|
|
"deepinfra/meta-llama/Meta-Llama-3-8B-Instruct": {
|
|
"max_tokens": 8192,
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"input_cost_per_token": 3e-08,
|
|
"output_cost_per_token": 6e-08,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/meta-llama/Meta-Llama-3.1-70B-Instruct": {
|
|
"max_tokens": 131072,
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"input_cost_per_token": 4e-07,
|
|
"output_cost_per_token": 4e-07,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo": {
|
|
"max_tokens": 131072,
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"input_cost_per_token": 1e-07,
|
|
"output_cost_per_token": 2.8e-07,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/meta-llama/Meta-Llama-3.1-8B-Instruct": {
|
|
"max_tokens": 131072,
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"input_cost_per_token": 3e-08,
|
|
"output_cost_per_token": 5e-08,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo": {
|
|
"max_tokens": 131072,
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"input_cost_per_token": 2e-08,
|
|
"output_cost_per_token": 3e-08,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/microsoft/WizardLM-2-8x22B": {
|
|
"max_tokens": 65536,
|
|
"max_input_tokens": 65536,
|
|
"max_output_tokens": 65536,
|
|
"input_cost_per_token": 4.8e-07,
|
|
"output_cost_per_token": 4.8e-07,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": false
|
|
},
|
|
"deepinfra/microsoft/phi-4": {
|
|
"max_tokens": 16384,
|
|
"max_input_tokens": 16384,
|
|
"max_output_tokens": 16384,
|
|
"input_cost_per_token": 7e-08,
|
|
"output_cost_per_token": 1.4e-07,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/mistralai/Mistral-Nemo-Instruct-2407": {
|
|
"max_tokens": 131072,
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"input_cost_per_token": 2e-08,
|
|
"output_cost_per_token": 4e-08,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/mistralai/Mistral-Small-24B-Instruct-2501": {
|
|
"max_tokens": 32768,
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 32768,
|
|
"input_cost_per_token": 5e-08,
|
|
"output_cost_per_token": 8e-08,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/mistralai/Mistral-Small-3.2-24B-Instruct-2506": {
|
|
"max_tokens": 128000,
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"input_cost_per_token": 7.5e-08,
|
|
"output_cost_per_token": 2e-07,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/mistralai/Mixtral-8x7B-Instruct-v0.1": {
|
|
"max_tokens": 32768,
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 32768,
|
|
"input_cost_per_token": 4e-07,
|
|
"output_cost_per_token": 4e-07,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/moonshotai/Kimi-K2-Instruct": {
|
|
"max_tokens": 131072,
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"input_cost_per_token": 5e-07,
|
|
"output_cost_per_token": 2e-06,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/moonshotai/Kimi-K2-Instruct-0905": {
|
|
"max_tokens": 262144,
|
|
"max_input_tokens": 262144,
|
|
"max_output_tokens": 262144,
|
|
"input_cost_per_token": 5e-07,
|
|
"output_cost_per_token": 2e-06,
|
|
"cache_read_input_token_cost": 4e-07,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/nvidia/Llama-3.1-Nemotron-70B-Instruct": {
|
|
"max_tokens": 131072,
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"input_cost_per_token": 6e-07,
|
|
"output_cost_per_token": 6e-07,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/nvidia/Llama-3.3-Nemotron-Super-49B-v1.5": {
|
|
"max_tokens": 131072,
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"input_cost_per_token": 1e-07,
|
|
"output_cost_per_token": 4e-07,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/nvidia/NVIDIA-Nemotron-Nano-9B-v2": {
|
|
"max_tokens": 131072,
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"input_cost_per_token": 4e-08,
|
|
"output_cost_per_token": 1.6e-07,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/openai/gpt-oss-120b": {
|
|
"max_tokens": 131072,
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"input_cost_per_token": 5e-08,
|
|
"output_cost_per_token": 4.5e-07,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/openai/gpt-oss-20b": {
|
|
"max_tokens": 131072,
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"input_cost_per_token": 4e-08,
|
|
"output_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepinfra/zai-org/GLM-4.5": {
|
|
"max_tokens": 131072,
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"input_cost_per_token": 4e-07,
|
|
"output_cost_per_token": 1.6e-06,
|
|
"litellm_provider": "deepinfra",
|
|
"mode": "chat",
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepseek/deepseek-chat": {
|
|
"cache_creation_input_token_cost": 0.0,
|
|
"cache_read_input_token_cost": 7e-08,
|
|
"input_cost_per_token": 2.7e-07,
|
|
"input_cost_per_token_cache_hit": 7e-08,
|
|
"litellm_provider": "deepseek",
|
|
"max_input_tokens": 65536,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.1e-06,
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepseek/deepseek-coder": {
|
|
"input_cost_per_token": 1.4e-07,
|
|
"input_cost_per_token_cache_hit": 1.4e-08,
|
|
"litellm_provider": "deepseek",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.8e-07,
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepseek/deepseek-r1": {
|
|
"input_cost_per_token": 5.5e-07,
|
|
"input_cost_per_token_cache_hit": 1.4e-07,
|
|
"litellm_provider": "deepseek",
|
|
"max_input_tokens": 65536,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.19e-06,
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepseek/deepseek-reasoner": {
|
|
"input_cost_per_token": 5.5e-07,
|
|
"input_cost_per_token_cache_hit": 1.4e-07,
|
|
"litellm_provider": "deepseek",
|
|
"max_input_tokens": 65536,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.19e-06,
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepseek/deepseek-v3": {
|
|
"cache_creation_input_token_cost": 0.0,
|
|
"cache_read_input_token_cost": 7e-08,
|
|
"input_cost_per_token": 2.7e-07,
|
|
"input_cost_per_token_cache_hit": 7e-08,
|
|
"litellm_provider": "deepseek",
|
|
"max_input_tokens": 65536,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.1e-06,
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepseek/deepseek-v3.2": {
|
|
"input_cost_per_token": 2.8e-07,
|
|
"input_cost_per_token_cache_hit": 2.8e-08,
|
|
"litellm_provider": "deepseek",
|
|
"max_input_tokens": 163840,
|
|
"max_output_tokens": 163840,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-07,
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"deepseek.v3-v1:0": {
|
|
"input_cost_per_token": 5.8e-07,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 163840,
|
|
"max_output_tokens": 81920,
|
|
"max_tokens": 163840,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.68e-06,
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"dolphin": {
|
|
"input_cost_per_token": 5e-07,
|
|
"litellm_provider": "nlp_cloud",
|
|
"max_input_tokens": 16384,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "completion",
|
|
"output_cost_per_token": 5e-07
|
|
},
|
|
"doubao-embedding": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "volcengine",
|
|
"max_input_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"metadata": {
|
|
"notes": "Volcengine Doubao embedding model - standard version with 2560 dimensions"
|
|
},
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0,
|
|
"output_vector_size": 2560
|
|
},
|
|
"doubao-embedding-large": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "volcengine",
|
|
"max_input_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"metadata": {
|
|
"notes": "Volcengine Doubao embedding model - large version with 2048 dimensions"
|
|
},
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0,
|
|
"output_vector_size": 2048
|
|
},
|
|
"doubao-embedding-large-text-240915": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "volcengine",
|
|
"max_input_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"metadata": {
|
|
"notes": "Volcengine Doubao embedding model - text-240915 version with 4096 dimensions"
|
|
},
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0,
|
|
"output_vector_size": 4096
|
|
},
|
|
"doubao-embedding-large-text-250515": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "volcengine",
|
|
"max_input_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"metadata": {
|
|
"notes": "Volcengine Doubao embedding model - text-250515 version with 2048 dimensions"
|
|
},
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0,
|
|
"output_vector_size": 2048
|
|
},
|
|
"doubao-embedding-text-240715": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "volcengine",
|
|
"max_input_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"metadata": {
|
|
"notes": "Volcengine Doubao embedding model - text-240715 version with 2560 dimensions"
|
|
},
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0,
|
|
"output_vector_size": 2560
|
|
},
|
|
"exa_ai/search": {
|
|
"litellm_provider": "exa_ai",
|
|
"mode": "search",
|
|
"tiered_pricing": [
|
|
{
|
|
"input_cost_per_query": 5e-03,
|
|
"max_results_range": [
|
|
0,
|
|
25
|
|
]
|
|
},
|
|
{
|
|
"input_cost_per_query": 25e-03,
|
|
"max_results_range": [
|
|
26,
|
|
100
|
|
]
|
|
}
|
|
]
|
|
},
|
|
"firecrawl/search": {
|
|
"litellm_provider": "firecrawl",
|
|
"mode": "search",
|
|
"tiered_pricing": [
|
|
{
|
|
"input_cost_per_query": 1.66e-03,
|
|
"max_results_range": [
|
|
1,
|
|
10
|
|
]
|
|
},
|
|
{
|
|
"input_cost_per_query": 3.32e-03,
|
|
"max_results_range": [
|
|
11,
|
|
20
|
|
]
|
|
},
|
|
{
|
|
"input_cost_per_query": 4.98e-03,
|
|
"max_results_range": [
|
|
21,
|
|
30
|
|
]
|
|
},
|
|
{
|
|
"input_cost_per_query": 6.64e-03,
|
|
"max_results_range": [
|
|
31,
|
|
40
|
|
]
|
|
},
|
|
{
|
|
"input_cost_per_query": 8.3e-03,
|
|
"max_results_range": [
|
|
41,
|
|
50
|
|
]
|
|
},
|
|
{
|
|
"input_cost_per_query": 9.96e-03,
|
|
"max_results_range": [
|
|
51,
|
|
60
|
|
]
|
|
},
|
|
{
|
|
"input_cost_per_query": 11.62e-03,
|
|
"max_results_range": [
|
|
61,
|
|
70
|
|
]
|
|
},
|
|
{
|
|
"input_cost_per_query": 13.28e-03,
|
|
"max_results_range": [
|
|
71,
|
|
80
|
|
]
|
|
},
|
|
{
|
|
"input_cost_per_query": 14.94e-03,
|
|
"max_results_range": [
|
|
81,
|
|
90
|
|
]
|
|
},
|
|
{
|
|
"input_cost_per_query": 16.6e-03,
|
|
"max_results_range": [
|
|
91,
|
|
100
|
|
]
|
|
}
|
|
],
|
|
"metadata": {
|
|
"notes": "Firecrawl search pricing: $83 for 100,000 credits, 2 credits per 10 results. Cost = ceiling(limit/10) * 2 * $0.00083"
|
|
}
|
|
},
|
|
"perplexity/search": {
|
|
"input_cost_per_query": 5e-03,
|
|
"litellm_provider": "perplexity",
|
|
"mode": "search"
|
|
},
|
|
"searxng/search": {
|
|
"litellm_provider": "searxng",
|
|
"mode": "search",
|
|
"input_cost_per_query": 0.0,
|
|
"metadata": {
|
|
"notes": "SearXNG is an open-source metasearch engine. Free to use when self-hosted or using public instances."
|
|
}
|
|
},
|
|
"elevenlabs/scribe_v1": {
|
|
"input_cost_per_second": 6.11e-05,
|
|
"litellm_provider": "elevenlabs",
|
|
"metadata": {
|
|
"calculation": "$0.22/hour = $0.00366/minute = $0.0000611 per second (enterprise pricing)",
|
|
"notes": "ElevenLabs Scribe v1 - state-of-the-art speech recognition model with 99 language support",
|
|
"original_pricing_per_hour": 0.22
|
|
},
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_second": 0.0,
|
|
"source": "https://elevenlabs.io/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/audio/transcriptions"
|
|
]
|
|
},
|
|
"elevenlabs/scribe_v1_experimental": {
|
|
"input_cost_per_second": 6.11e-05,
|
|
"litellm_provider": "elevenlabs",
|
|
"metadata": {
|
|
"calculation": "$0.22/hour = $0.00366/minute = $0.0000611 per second (enterprise pricing)",
|
|
"notes": "ElevenLabs Scribe v1 experimental - enhanced version of the main Scribe model",
|
|
"original_pricing_per_hour": 0.22
|
|
},
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_second": 0.0,
|
|
"source": "https://elevenlabs.io/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/audio/transcriptions"
|
|
]
|
|
},
|
|
"embed-english-light-v2.0": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "cohere",
|
|
"max_input_tokens": 1024,
|
|
"max_tokens": 1024,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"embed-english-light-v3.0": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "cohere",
|
|
"max_input_tokens": 1024,
|
|
"max_tokens": 1024,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"embed-english-v2.0": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "cohere",
|
|
"max_input_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"embed-english-v3.0": {
|
|
"input_cost_per_image": 0.0001,
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "cohere",
|
|
"max_input_tokens": 1024,
|
|
"max_tokens": 1024,
|
|
"metadata": {
|
|
"notes": "'supports_image_input' is a deprecated field. Use 'supports_embedding_image_input' instead."
|
|
},
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0,
|
|
"supports_embedding_image_input": true,
|
|
"supports_image_input": true
|
|
},
|
|
"embed-multilingual-v2.0": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "cohere",
|
|
"max_input_tokens": 768,
|
|
"max_tokens": 768,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"embed-multilingual-v3.0": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "cohere",
|
|
"max_input_tokens": 1024,
|
|
"max_tokens": 1024,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0,
|
|
"supports_embedding_image_input": true
|
|
},
|
|
"embed-multilingual-light-v3.0": {
|
|
"input_cost_per_token": 1e-04,
|
|
"litellm_provider": "cohere",
|
|
"max_input_tokens": 1024,
|
|
"max_tokens": 1024,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0,
|
|
"supports_embedding_image_input": true
|
|
},
|
|
"eu.amazon.nova-lite-v1:0": {
|
|
"input_cost_per_token": 7.8e-08,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 300000,
|
|
"max_output_tokens": 10000,
|
|
"max_tokens": 10000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3.12e-07,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_vision": true
|
|
},
|
|
"eu.amazon.nova-micro-v1:0": {
|
|
"input_cost_per_token": 4.6e-08,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 10000,
|
|
"max_tokens": 10000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.84e-07,
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true
|
|
},
|
|
"eu.amazon.nova-pro-v1:0": {
|
|
"input_cost_per_token": 1.05e-06,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 300000,
|
|
"max_output_tokens": 10000,
|
|
"max_tokens": 10000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4.2e-06,
|
|
"source": "https://aws.amazon.com/bedrock/pricing/",
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_vision": true
|
|
},
|
|
"eu.anthropic.claude-3-5-haiku-20241022-v1:0": {
|
|
"input_cost_per_token": 2.5e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.25e-06,
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"eu.anthropic.claude-haiku-4-5-20251001-v1:0": {
|
|
"cache_creation_input_token_cost": 1.375e-06,
|
|
"cache_read_input_token_cost": 1.1e-07,
|
|
"input_cost_per_token": 1.1e-06,
|
|
"deprecation_date": "2026-10-15",
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 64000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5.5e-06,
|
|
"source": "https://aws.amazon.com/about-aws/whats-new/2025/10/claude-4-5-haiku-anthropic-amazon-bedrock",
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 346
|
|
},
|
|
"eu.anthropic.claude-3-5-sonnet-20240620-v1:0": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"eu.anthropic.claude-3-5-sonnet-20241022-v2:0": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"eu.anthropic.claude-3-7-sonnet-20250219-v1:0": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"eu.anthropic.claude-3-haiku-20240307-v1:0": {
|
|
"input_cost_per_token": 2.5e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.25e-06,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"eu.anthropic.claude-3-opus-20240229-v1:0": {
|
|
"input_cost_per_token": 1.5e-05,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.5e-05,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"eu.anthropic.claude-3-sonnet-20240229-v1:0": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"eu.anthropic.claude-opus-4-1-20250805-v1:0": {
|
|
"cache_creation_input_token_cost": 1.875e-05,
|
|
"cache_read_input_token_cost": 1.5e-06,
|
|
"input_cost_per_token": 1.5e-05,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 32000,
|
|
"max_tokens": 32000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.5e-05,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.01,
|
|
"search_context_size_low": 0.01,
|
|
"search_context_size_medium": 0.01
|
|
},
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 159
|
|
},
|
|
"eu.anthropic.claude-opus-4-20250514-v1:0": {
|
|
"cache_creation_input_token_cost": 1.875e-05,
|
|
"cache_read_input_token_cost": 1.5e-06,
|
|
"input_cost_per_token": 1.5e-05,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 32000,
|
|
"max_tokens": 32000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.5e-05,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.01,
|
|
"search_context_size_low": 0.01,
|
|
"search_context_size_medium": 0.01
|
|
},
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 159
|
|
},
|
|
"eu.anthropic.claude-sonnet-4-20250514-v1:0": {
|
|
"cache_creation_input_token_cost": 3.75e-06,
|
|
"cache_read_input_token_cost": 3e-07,
|
|
"input_cost_per_token": 3e-06,
|
|
"input_cost_per_token_above_200k_tokens": 6e-06,
|
|
"output_cost_per_token_above_200k_tokens": 2.25e-05,
|
|
"cache_creation_input_token_cost_above_200k_tokens": 7.5e-06,
|
|
"cache_read_input_token_cost_above_200k_tokens": 6e-07,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 1000000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 64000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.01,
|
|
"search_context_size_low": 0.01,
|
|
"search_context_size_medium": 0.01
|
|
},
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 159
|
|
},
|
|
"eu.anthropic.claude-sonnet-4-5-20250929-v1:0": {
|
|
"cache_creation_input_token_cost": 4.125e-06,
|
|
"cache_read_input_token_cost": 3.3e-07,
|
|
"input_cost_per_token": 3.3e-06,
|
|
"input_cost_per_token_above_200k_tokens": 6.6e-06,
|
|
"output_cost_per_token_above_200k_tokens": 2.475e-05,
|
|
"cache_creation_input_token_cost_above_200k_tokens": 8.25e-06,
|
|
"cache_read_input_token_cost_above_200k_tokens": 6.6e-07,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 64000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.65e-05,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.01,
|
|
"search_context_size_low": 0.01,
|
|
"search_context_size_medium": 0.01
|
|
},
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 346
|
|
},
|
|
"eu.meta.llama3-2-1b-instruct-v1:0": {
|
|
"input_cost_per_token": 1.3e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.3e-07,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": false
|
|
},
|
|
"eu.meta.llama3-2-3b-instruct-v1:0": {
|
|
"input_cost_per_token": 1.9e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.9e-07,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": false
|
|
},
|
|
"eu.mistral.pixtral-large-2502-v1:0": {
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-06,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": false
|
|
},
|
|
"fal_ai/bria/text-to-image/3.2": {
|
|
"litellm_provider": "fal_ai",
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.0398,
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"fal_ai/fal-ai/flux-pro/v1.1": {
|
|
"litellm_provider": "fal_ai",
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.04,
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"fal_ai/fal-ai/flux-pro/v1.1-ultra": {
|
|
"litellm_provider": "fal_ai",
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.06,
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"fal_ai/fal-ai/flux/schnell": {
|
|
"litellm_provider": "fal_ai",
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.003,
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"fal_ai/fal-ai/bytedance/seedream/v3/text-to-image": {
|
|
"litellm_provider": "fal_ai",
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.03,
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"fal_ai/fal-ai/bytedance/dreamina/v3.1/text-to-image": {
|
|
"litellm_provider": "fal_ai",
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.03,
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"fal_ai/fal-ai/ideogram/v3": {
|
|
"litellm_provider": "fal_ai",
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.06,
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"fal_ai/fal-ai/imagen4/preview": {
|
|
"litellm_provider": "fal_ai",
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.0398,
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"fal_ai/fal-ai/imagen4/preview/fast": {
|
|
"litellm_provider": "fal_ai",
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.02,
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"fal_ai/fal-ai/imagen4/preview/ultra": {
|
|
"litellm_provider": "fal_ai",
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.06,
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"fal_ai/fal-ai/recraft/v3/text-to-image": {
|
|
"litellm_provider": "fal_ai",
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.0398,
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"fal_ai/fal-ai/stable-diffusion-v35-medium": {
|
|
"litellm_provider": "fal_ai",
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.0398,
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"featherless_ai/featherless-ai/Qwerky-72B": {
|
|
"litellm_provider": "featherless_ai",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 32768,
|
|
"mode": "chat"
|
|
},
|
|
"featherless_ai/featherless-ai/Qwerky-QwQ-32B": {
|
|
"litellm_provider": "featherless_ai",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 32768,
|
|
"mode": "chat"
|
|
},
|
|
"fireworks-ai-4.1b-to-16b": {
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "fireworks_ai",
|
|
"output_cost_per_token": 2e-07
|
|
},
|
|
"fireworks-ai-56b-to-176b": {
|
|
"input_cost_per_token": 1.2e-06,
|
|
"litellm_provider": "fireworks_ai",
|
|
"output_cost_per_token": 1.2e-06
|
|
},
|
|
"fireworks-ai-above-16b": {
|
|
"input_cost_per_token": 9e-07,
|
|
"litellm_provider": "fireworks_ai",
|
|
"output_cost_per_token": 9e-07
|
|
},
|
|
"fireworks-ai-default": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "fireworks_ai",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"fireworks-ai-embedding-150m-to-350m": {
|
|
"input_cost_per_token": 1.6e-08,
|
|
"litellm_provider": "fireworks_ai-embedding-models",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"fireworks-ai-embedding-up-to-150m": {
|
|
"input_cost_per_token": 8e-09,
|
|
"litellm_provider": "fireworks_ai-embedding-models",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"fireworks-ai-moe-up-to-56b": {
|
|
"input_cost_per_token": 5e-07,
|
|
"litellm_provider": "fireworks_ai",
|
|
"output_cost_per_token": 5e-07
|
|
},
|
|
"fireworks-ai-up-to-4b": {
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "fireworks_ai",
|
|
"output_cost_per_token": 2e-07
|
|
},
|
|
"fireworks_ai/WhereIsAI/UAE-Large-V1": {
|
|
"input_cost_per_token": 1.6e-08,
|
|
"litellm_provider": "fireworks_ai-embedding-models",
|
|
"max_input_tokens": 512,
|
|
"max_tokens": 512,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0,
|
|
"source": "https://fireworks.ai/pricing"
|
|
},
|
|
"fireworks_ai/accounts/fireworks/models/deepseek-coder-v2-instruct": {
|
|
"input_cost_per_token": 1.2e-06,
|
|
"litellm_provider": "fireworks_ai",
|
|
"max_input_tokens": 65536,
|
|
"max_output_tokens": 65536,
|
|
"max_tokens": 65536,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.2e-06,
|
|
"source": "https://fireworks.ai/pricing",
|
|
"supports_function_calling": false,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": false
|
|
},
|
|
"fireworks_ai/accounts/fireworks/models/deepseek-r1": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "fireworks_ai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 20480,
|
|
"max_tokens": 20480,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8e-06,
|
|
"source": "https://fireworks.ai/pricing",
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": false
|
|
},
|
|
"fireworks_ai/accounts/fireworks/models/deepseek-r1-0528": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "fireworks_ai",
|
|
"max_input_tokens": 160000,
|
|
"max_output_tokens": 160000,
|
|
"max_tokens": 160000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8e-06,
|
|
"source": "https://fireworks.ai/pricing",
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": false
|
|
},
|
|
"fireworks_ai/accounts/fireworks/models/deepseek-r1-basic": {
|
|
"input_cost_per_token": 5.5e-07,
|
|
"litellm_provider": "fireworks_ai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 20480,
|
|
"max_tokens": 20480,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.19e-06,
|
|
"source": "https://fireworks.ai/pricing",
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": false
|
|
},
|
|
"fireworks_ai/accounts/fireworks/models/deepseek-v3": {
|
|
"input_cost_per_token": 9e-07,
|
|
"litellm_provider": "fireworks_ai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 9e-07,
|
|
"source": "https://fireworks.ai/pricing",
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": false
|
|
},
|
|
"fireworks_ai/accounts/fireworks/models/deepseek-v3-0324": {
|
|
"input_cost_per_token": 9e-07,
|
|
"litellm_provider": "fireworks_ai",
|
|
"max_input_tokens": 163840,
|
|
"max_output_tokens": 163840,
|
|
"max_tokens": 163840,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 9e-07,
|
|
"source": "https://fireworks.ai/models/fireworks/deepseek-v3-0324",
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": false
|
|
},
|
|
"fireworks_ai/accounts/fireworks/models/deepseek-v3p1": {
|
|
"input_cost_per_token": 5.6e-07,
|
|
"litellm_provider": "fireworks_ai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.68e-06,
|
|
"source": "https://fireworks.ai/pricing",
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"fireworks_ai/accounts/fireworks/models/deepseek-v3p1-terminus": {
|
|
"input_cost_per_token": 5.6e-07,
|
|
"litellm_provider": "fireworks_ai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.68e-06,
|
|
"source": "https://fireworks.ai/pricing",
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"fireworks_ai/accounts/fireworks/models/deepseek-v3p2": {
|
|
"input_cost_per_token": 1.2e-06,
|
|
"litellm_provider": "fireworks_ai",
|
|
"max_input_tokens": 163840,
|
|
"max_output_tokens": 163840,
|
|
"max_tokens": 163840,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.2e-06,
|
|
"source": "https://fireworks.ai/models/fireworks/deepseek-v3p2",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"fireworks_ai/accounts/fireworks/models/firefunction-v2": {
|
|
"input_cost_per_token": 9e-07,
|
|
"litellm_provider": "fireworks_ai",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 9e-07,
|
|
"source": "https://fireworks.ai/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"fireworks_ai/accounts/fireworks/models/glm-4p5": {
|
|
"input_cost_per_token": 5.5e-07,
|
|
"litellm_provider": "fireworks_ai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 96000,
|
|
"max_tokens": 96000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.19e-06,
|
|
"source": "https://fireworks.ai/models/fireworks/glm-4p5",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"fireworks_ai/accounts/fireworks/models/glm-4p5-air": {
|
|
"input_cost_per_token": 2.2e-07,
|
|
"litellm_provider": "fireworks_ai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 96000,
|
|
"max_tokens": 96000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8.8e-07,
|
|
"source": "https://artificialanalysis.ai/models/glm-4-5-air",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"fireworks_ai/accounts/fireworks/models/glm-4p6": {
|
|
"input_cost_per_token": 0.55e-06,
|
|
"output_cost_per_token": 2.19e-06,
|
|
"litellm_provider": "fireworks_ai",
|
|
"max_input_tokens": 202800,
|
|
"max_output_tokens": 202800,
|
|
"max_tokens": 202800,
|
|
"mode": "chat",
|
|
"source": "https://fireworks.ai/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"fireworks_ai/accounts/fireworks/models/gpt-oss-120b": {
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "fireworks_ai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-07,
|
|
"source": "https://fireworks.ai/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"fireworks_ai/accounts/fireworks/models/gpt-oss-20b": {
|
|
"input_cost_per_token": 5e-08,
|
|
"litellm_provider": "fireworks_ai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-07,
|
|
"source": "https://fireworks.ai/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"fireworks_ai/accounts/fireworks/models/kimi-k2-instruct": {
|
|
"input_cost_per_token": 6e-07,
|
|
"litellm_provider": "fireworks_ai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.5e-06,
|
|
"source": "https://fireworks.ai/models/fireworks/kimi-k2-instruct",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"fireworks_ai/accounts/fireworks/models/kimi-k2-instruct-0905": {
|
|
"input_cost_per_token": 6e-07,
|
|
"litellm_provider": "fireworks_ai",
|
|
"max_input_tokens": 262144,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 262144,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.5e-06,
|
|
"source": "https://app.fireworks.ai/models/fireworks/kimi-k2-instruct-0905",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"fireworks_ai/accounts/fireworks/models/kimi-k2-thinking": {
|
|
"input_cost_per_token": 6e-07,
|
|
"litellm_provider": "fireworks_ai",
|
|
"max_input_tokens": 262144,
|
|
"max_output_tokens": 262144,
|
|
"max_tokens": 262144,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.5e-06,
|
|
"source": "https://fireworks.ai/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_web_search": true
|
|
},
|
|
"fireworks_ai/accounts/fireworks/models/llama-v3p1-405b-instruct": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "fireworks_ai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-06,
|
|
"source": "https://fireworks.ai/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"fireworks_ai/accounts/fireworks/models/llama-v3p1-8b-instruct": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "fireworks_ai",
|
|
"max_input_tokens": 16384,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-07,
|
|
"source": "https://fireworks.ai/pricing",
|
|
"supports_function_calling": false,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": false
|
|
},
|
|
"fireworks_ai/accounts/fireworks/models/llama-v3p2-11b-vision-instruct": {
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "fireworks_ai",
|
|
"max_input_tokens": 16384,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-07,
|
|
"source": "https://fireworks.ai/pricing",
|
|
"supports_function_calling": false,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": false,
|
|
"supports_vision": true
|
|
},
|
|
"fireworks_ai/accounts/fireworks/models/llama-v3p2-1b-instruct": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "fireworks_ai",
|
|
"max_input_tokens": 16384,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-07,
|
|
"source": "https://fireworks.ai/pricing",
|
|
"supports_function_calling": false,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": false
|
|
},
|
|
"fireworks_ai/accounts/fireworks/models/llama-v3p2-3b-instruct": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "fireworks_ai",
|
|
"max_input_tokens": 16384,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-07,
|
|
"source": "https://fireworks.ai/pricing",
|
|
"supports_function_calling": false,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": false
|
|
},
|
|
"fireworks_ai/accounts/fireworks/models/llama-v3p2-90b-vision-instruct": {
|
|
"input_cost_per_token": 9e-07,
|
|
"litellm_provider": "fireworks_ai",
|
|
"max_input_tokens": 16384,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 9e-07,
|
|
"source": "https://fireworks.ai/pricing",
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": false,
|
|
"supports_vision": true
|
|
},
|
|
"fireworks_ai/accounts/fireworks/models/llama4-maverick-instruct-basic": {
|
|
"input_cost_per_token": 2.2e-07,
|
|
"litellm_provider": "fireworks_ai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8.8e-07,
|
|
"source": "https://fireworks.ai/pricing",
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": false
|
|
},
|
|
"fireworks_ai/accounts/fireworks/models/llama4-scout-instruct-basic": {
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "fireworks_ai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-07,
|
|
"source": "https://fireworks.ai/pricing",
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": false
|
|
},
|
|
"fireworks_ai/accounts/fireworks/models/mixtral-8x22b-instruct-hf": {
|
|
"input_cost_per_token": 1.2e-06,
|
|
"litellm_provider": "fireworks_ai",
|
|
"max_input_tokens": 65536,
|
|
"max_output_tokens": 65536,
|
|
"max_tokens": 65536,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.2e-06,
|
|
"source": "https://fireworks.ai/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"fireworks_ai/accounts/fireworks/models/qwen2-72b-instruct": {
|
|
"input_cost_per_token": 9e-07,
|
|
"litellm_provider": "fireworks_ai",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 9e-07,
|
|
"source": "https://fireworks.ai/pricing",
|
|
"supports_function_calling": false,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": false
|
|
},
|
|
"fireworks_ai/accounts/fireworks/models/qwen2p5-coder-32b-instruct": {
|
|
"input_cost_per_token": 9e-07,
|
|
"litellm_provider": "fireworks_ai",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 9e-07,
|
|
"source": "https://fireworks.ai/pricing",
|
|
"supports_function_calling": false,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": false
|
|
},
|
|
"fireworks_ai/accounts/fireworks/models/yi-large": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "fireworks_ai",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-06,
|
|
"source": "https://fireworks.ai/pricing",
|
|
"supports_function_calling": false,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": false
|
|
},
|
|
"fireworks_ai/nomic-ai/nomic-embed-text-v1": {
|
|
"input_cost_per_token": 8e-09,
|
|
"litellm_provider": "fireworks_ai-embedding-models",
|
|
"max_input_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0,
|
|
"source": "https://fireworks.ai/pricing"
|
|
},
|
|
"fireworks_ai/nomic-ai/nomic-embed-text-v1.5": {
|
|
"input_cost_per_token": 8e-09,
|
|
"litellm_provider": "fireworks_ai-embedding-models",
|
|
"max_input_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0,
|
|
"source": "https://fireworks.ai/pricing"
|
|
},
|
|
"fireworks_ai/thenlper/gte-base": {
|
|
"input_cost_per_token": 8e-09,
|
|
"litellm_provider": "fireworks_ai-embedding-models",
|
|
"max_input_tokens": 512,
|
|
"max_tokens": 512,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0,
|
|
"source": "https://fireworks.ai/pricing"
|
|
},
|
|
"fireworks_ai/thenlper/gte-large": {
|
|
"input_cost_per_token": 1.6e-08,
|
|
"litellm_provider": "fireworks_ai-embedding-models",
|
|
"max_input_tokens": 512,
|
|
"max_tokens": 512,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0,
|
|
"source": "https://fireworks.ai/pricing"
|
|
},
|
|
"friendliai/meta-llama-3.1-70b-instruct": {
|
|
"input_cost_per_token": 6e-07,
|
|
"litellm_provider": "friendliai",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"friendliai/meta-llama-3.1-8b-instruct": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "friendliai",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"ft:babbage-002": {
|
|
"input_cost_per_token": 1.6e-06,
|
|
"input_cost_per_token_batches": 2e-07,
|
|
"litellm_provider": "text-completion-openai",
|
|
"max_input_tokens": 16384,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 16384,
|
|
"mode": "completion",
|
|
"output_cost_per_token": 1.6e-06,
|
|
"output_cost_per_token_batches": 2e-07
|
|
},
|
|
"ft:davinci-002": {
|
|
"input_cost_per_token": 1.2e-05,
|
|
"input_cost_per_token_batches": 1e-06,
|
|
"litellm_provider": "text-completion-openai",
|
|
"max_input_tokens": 16384,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 16384,
|
|
"mode": "completion",
|
|
"output_cost_per_token": 1.2e-05,
|
|
"output_cost_per_token_batches": 1e-06
|
|
},
|
|
"ft:gpt-3.5-turbo": {
|
|
"input_cost_per_token": 3e-06,
|
|
"input_cost_per_token_batches": 1.5e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 16385,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-06,
|
|
"output_cost_per_token_batches": 3e-06,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"ft:gpt-3.5-turbo-0125": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 16385,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-06,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"ft:gpt-3.5-turbo-0613": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-06,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"ft:gpt-3.5-turbo-1106": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 16385,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-06,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"ft:gpt-4-0613": {
|
|
"input_cost_per_token": 3e-05,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-05,
|
|
"source": "OpenAI needs to add pricing for this ft model, will be updated when added by OpenAI. Defaulting to base model pricing",
|
|
"supports_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"ft:gpt-4o-2024-08-06": {
|
|
"cache_read_input_token_cost": 1.875e-06,
|
|
"input_cost_per_token": 3.75e-06,
|
|
"input_cost_per_token_batches": 1.875e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"output_cost_per_token_batches": 7.5e-06,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"ft:gpt-4o-2024-11-20": {
|
|
"cache_creation_input_token_cost": 1.875e-06,
|
|
"input_cost_per_token": 3.75e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"ft:gpt-4o-mini-2024-07-18": {
|
|
"cache_read_input_token_cost": 1.5e-07,
|
|
"input_cost_per_token": 3e-07,
|
|
"input_cost_per_token_batches": 1.5e-07,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.2e-06,
|
|
"output_cost_per_token_batches": 6e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"ft:gpt-4.1-2025-04-14": {
|
|
"cache_read_input_token_cost": 7.5e-07,
|
|
"input_cost_per_token": 3e-06,
|
|
"input_cost_per_token_batches": 1.5e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 1047576,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.2e-05,
|
|
"output_cost_per_token_batches": 6e-06,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"ft:gpt-4.1-mini-2025-04-14": {
|
|
"cache_read_input_token_cost": 2e-07,
|
|
"input_cost_per_token": 8e-07,
|
|
"input_cost_per_token_batches": 4e-07,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 1047576,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3.2e-06,
|
|
"output_cost_per_token_batches": 1.6e-06,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"ft:gpt-4.1-nano-2025-04-14": {
|
|
"cache_read_input_token_cost": 5e-08,
|
|
"input_cost_per_token": 2e-07,
|
|
"input_cost_per_token_batches": 1e-07,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 1047576,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8e-07,
|
|
"output_cost_per_token_batches": 4e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"ft:o4-mini-2025-04-16": {
|
|
"cache_read_input_token_cost": 1e-06,
|
|
"input_cost_per_token": 4e-06,
|
|
"input_cost_per_token_batches": 2e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 100000,
|
|
"max_tokens": 100000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.6e-05,
|
|
"output_cost_per_token_batches": 8e-06,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": false,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"gemini-1.0-pro": {
|
|
"input_cost_per_character": 1.25e-07,
|
|
"input_cost_per_image": 0.0025,
|
|
"input_cost_per_token": 5e-07,
|
|
"input_cost_per_video_per_second": 0.002,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_input_tokens": 32760,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_character": 3.75e-07,
|
|
"output_cost_per_token": 1.5e-06,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#google_models",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"gemini-1.0-pro-001": {
|
|
"deprecation_date": "2025-04-09",
|
|
"input_cost_per_character": 1.25e-07,
|
|
"input_cost_per_image": 0.0025,
|
|
"input_cost_per_token": 5e-07,
|
|
"input_cost_per_video_per_second": 0.002,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_input_tokens": 32760,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_character": 3.75e-07,
|
|
"output_cost_per_token": 1.5e-06,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"gemini-1.0-pro-002": {
|
|
"deprecation_date": "2025-04-09",
|
|
"input_cost_per_character": 1.25e-07,
|
|
"input_cost_per_image": 0.0025,
|
|
"input_cost_per_token": 5e-07,
|
|
"input_cost_per_video_per_second": 0.002,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_input_tokens": 32760,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_character": 3.75e-07,
|
|
"output_cost_per_token": 1.5e-06,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"gemini-1.0-pro-vision": {
|
|
"input_cost_per_image": 0.0025,
|
|
"input_cost_per_token": 5e-07,
|
|
"litellm_provider": "vertex_ai-vision-models",
|
|
"max_images_per_prompt": 16,
|
|
"max_input_tokens": 16384,
|
|
"max_output_tokens": 2048,
|
|
"max_tokens": 2048,
|
|
"max_video_length": 2,
|
|
"max_videos_per_prompt": 1,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-06,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"gemini-1.0-pro-vision-001": {
|
|
"deprecation_date": "2025-04-09",
|
|
"input_cost_per_image": 0.0025,
|
|
"input_cost_per_token": 5e-07,
|
|
"litellm_provider": "vertex_ai-vision-models",
|
|
"max_images_per_prompt": 16,
|
|
"max_input_tokens": 16384,
|
|
"max_output_tokens": 2048,
|
|
"max_tokens": 2048,
|
|
"max_video_length": 2,
|
|
"max_videos_per_prompt": 1,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-06,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"gemini-1.0-ultra": {
|
|
"input_cost_per_character": 1.25e-07,
|
|
"input_cost_per_image": 0.0025,
|
|
"input_cost_per_token": 5e-07,
|
|
"input_cost_per_video_per_second": 0.002,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 2048,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_character": 3.75e-07,
|
|
"output_cost_per_token": 1.5e-06,
|
|
"source": "As of Jun, 2024. There is no available doc on vertex ai pricing gemini-1.0-ultra-001. Using gemini-1.0-pro pricing. Got max_tokens info here: https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"gemini-1.0-ultra-001": {
|
|
"input_cost_per_character": 1.25e-07,
|
|
"input_cost_per_image": 0.0025,
|
|
"input_cost_per_token": 5e-07,
|
|
"input_cost_per_video_per_second": 0.002,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 2048,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_character": 3.75e-07,
|
|
"output_cost_per_token": 1.5e-06,
|
|
"source": "As of Jun, 2024. There is no available doc on vertex ai pricing gemini-1.0-ultra-001. Using gemini-1.0-pro pricing. Got max_tokens info here: https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"gemini-1.5-flash": {
|
|
"input_cost_per_audio_per_second": 2e-06,
|
|
"input_cost_per_audio_per_second_above_128k_tokens": 4e-06,
|
|
"input_cost_per_character": 1.875e-08,
|
|
"input_cost_per_character_above_128k_tokens": 2.5e-07,
|
|
"input_cost_per_image": 2e-05,
|
|
"input_cost_per_image_above_128k_tokens": 4e-05,
|
|
"input_cost_per_token": 7.5e-08,
|
|
"input_cost_per_token_above_128k_tokens": 1e-06,
|
|
"input_cost_per_video_per_second": 2e-05,
|
|
"input_cost_per_video_per_second_above_128k_tokens": 4e-05,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1000000,
|
|
"max_output_tokens": 8192,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 8192,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_character": 7.5e-08,
|
|
"output_cost_per_character_above_128k_tokens": 1.5e-07,
|
|
"output_cost_per_token": 3e-07,
|
|
"output_cost_per_token_above_128k_tokens": 6e-07,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"gemini-1.5-flash-001": {
|
|
"deprecation_date": "2025-05-24",
|
|
"input_cost_per_audio_per_second": 2e-06,
|
|
"input_cost_per_audio_per_second_above_128k_tokens": 4e-06,
|
|
"input_cost_per_character": 1.875e-08,
|
|
"input_cost_per_character_above_128k_tokens": 2.5e-07,
|
|
"input_cost_per_image": 2e-05,
|
|
"input_cost_per_image_above_128k_tokens": 4e-05,
|
|
"input_cost_per_token": 7.5e-08,
|
|
"input_cost_per_token_above_128k_tokens": 1e-06,
|
|
"input_cost_per_video_per_second": 2e-05,
|
|
"input_cost_per_video_per_second_above_128k_tokens": 4e-05,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1000000,
|
|
"max_output_tokens": 8192,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 8192,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_character": 7.5e-08,
|
|
"output_cost_per_character_above_128k_tokens": 1.5e-07,
|
|
"output_cost_per_token": 3e-07,
|
|
"output_cost_per_token_above_128k_tokens": 6e-07,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"gemini-1.5-flash-002": {
|
|
"deprecation_date": "2025-09-24",
|
|
"input_cost_per_audio_per_second": 2e-06,
|
|
"input_cost_per_audio_per_second_above_128k_tokens": 4e-06,
|
|
"input_cost_per_character": 1.875e-08,
|
|
"input_cost_per_character_above_128k_tokens": 2.5e-07,
|
|
"input_cost_per_image": 2e-05,
|
|
"input_cost_per_image_above_128k_tokens": 4e-05,
|
|
"input_cost_per_token": 7.5e-08,
|
|
"input_cost_per_token_above_128k_tokens": 1e-06,
|
|
"input_cost_per_video_per_second": 2e-05,
|
|
"input_cost_per_video_per_second_above_128k_tokens": 4e-05,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 8192,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 8192,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_character": 7.5e-08,
|
|
"output_cost_per_character_above_128k_tokens": 1.5e-07,
|
|
"output_cost_per_token": 3e-07,
|
|
"output_cost_per_token_above_128k_tokens": 6e-07,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-1.5-flash",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"gemini-1.5-flash-exp-0827": {
|
|
"input_cost_per_audio_per_second": 2e-06,
|
|
"input_cost_per_audio_per_second_above_128k_tokens": 4e-06,
|
|
"input_cost_per_character": 1.875e-08,
|
|
"input_cost_per_character_above_128k_tokens": 2.5e-07,
|
|
"input_cost_per_image": 2e-05,
|
|
"input_cost_per_image_above_128k_tokens": 4e-05,
|
|
"input_cost_per_token": 4.688e-09,
|
|
"input_cost_per_token_above_128k_tokens": 1e-06,
|
|
"input_cost_per_video_per_second": 2e-05,
|
|
"input_cost_per_video_per_second_above_128k_tokens": 4e-05,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1000000,
|
|
"max_output_tokens": 8192,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 8192,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_character": 1.875e-08,
|
|
"output_cost_per_character_above_128k_tokens": 3.75e-08,
|
|
"output_cost_per_token": 4.6875e-09,
|
|
"output_cost_per_token_above_128k_tokens": 9.375e-09,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"gemini-1.5-flash-preview-0514": {
|
|
"input_cost_per_audio_per_second": 2e-06,
|
|
"input_cost_per_audio_per_second_above_128k_tokens": 4e-06,
|
|
"input_cost_per_character": 1.875e-08,
|
|
"input_cost_per_character_above_128k_tokens": 2.5e-07,
|
|
"input_cost_per_image": 2e-05,
|
|
"input_cost_per_image_above_128k_tokens": 4e-05,
|
|
"input_cost_per_token": 7.5e-08,
|
|
"input_cost_per_token_above_128k_tokens": 1e-06,
|
|
"input_cost_per_video_per_second": 2e-05,
|
|
"input_cost_per_video_per_second_above_128k_tokens": 4e-05,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1000000,
|
|
"max_output_tokens": 8192,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 8192,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_character": 1.875e-08,
|
|
"output_cost_per_character_above_128k_tokens": 3.75e-08,
|
|
"output_cost_per_token": 4.6875e-09,
|
|
"output_cost_per_token_above_128k_tokens": 9.375e-09,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"gemini-1.5-pro": {
|
|
"input_cost_per_audio_per_second": 3.125e-05,
|
|
"input_cost_per_audio_per_second_above_128k_tokens": 6.25e-05,
|
|
"input_cost_per_character": 3.125e-07,
|
|
"input_cost_per_character_above_128k_tokens": 6.25e-07,
|
|
"input_cost_per_image": 0.00032875,
|
|
"input_cost_per_image_above_128k_tokens": 0.0006575,
|
|
"input_cost_per_token": 1.25e-06,
|
|
"input_cost_per_token_above_128k_tokens": 2.5e-06,
|
|
"input_cost_per_video_per_second": 0.00032875,
|
|
"input_cost_per_video_per_second_above_128k_tokens": 0.0006575,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_input_tokens": 2097152,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_character": 1.25e-06,
|
|
"output_cost_per_character_above_128k_tokens": 2.5e-06,
|
|
"output_cost_per_token": 5e-06,
|
|
"output_cost_per_token_above_128k_tokens": 1e-05,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"gemini-1.5-pro-001": {
|
|
"deprecation_date": "2025-05-24",
|
|
"input_cost_per_audio_per_second": 3.125e-05,
|
|
"input_cost_per_audio_per_second_above_128k_tokens": 6.25e-05,
|
|
"input_cost_per_character": 3.125e-07,
|
|
"input_cost_per_character_above_128k_tokens": 6.25e-07,
|
|
"input_cost_per_image": 0.00032875,
|
|
"input_cost_per_image_above_128k_tokens": 0.0006575,
|
|
"input_cost_per_token": 1.25e-06,
|
|
"input_cost_per_token_above_128k_tokens": 2.5e-06,
|
|
"input_cost_per_video_per_second": 0.00032875,
|
|
"input_cost_per_video_per_second_above_128k_tokens": 0.0006575,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_input_tokens": 1000000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_character": 1.25e-06,
|
|
"output_cost_per_character_above_128k_tokens": 2.5e-06,
|
|
"output_cost_per_token": 5e-06,
|
|
"output_cost_per_token_above_128k_tokens": 1e-05,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"gemini-1.5-pro-002": {
|
|
"deprecation_date": "2025-09-24",
|
|
"input_cost_per_audio_per_second": 3.125e-05,
|
|
"input_cost_per_audio_per_second_above_128k_tokens": 6.25e-05,
|
|
"input_cost_per_character": 3.125e-07,
|
|
"input_cost_per_character_above_128k_tokens": 6.25e-07,
|
|
"input_cost_per_image": 0.00032875,
|
|
"input_cost_per_image_above_128k_tokens": 0.0006575,
|
|
"input_cost_per_token": 1.25e-06,
|
|
"input_cost_per_token_above_128k_tokens": 2.5e-06,
|
|
"input_cost_per_video_per_second": 0.00032875,
|
|
"input_cost_per_video_per_second_above_128k_tokens": 0.0006575,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_input_tokens": 2097152,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_character": 1.25e-06,
|
|
"output_cost_per_character_above_128k_tokens": 2.5e-06,
|
|
"output_cost_per_token": 5e-06,
|
|
"output_cost_per_token_above_128k_tokens": 1e-05,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-1.5-pro",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"gemini-1.5-pro-preview-0215": {
|
|
"input_cost_per_audio_per_second": 3.125e-05,
|
|
"input_cost_per_audio_per_second_above_128k_tokens": 6.25e-05,
|
|
"input_cost_per_character": 3.125e-07,
|
|
"input_cost_per_character_above_128k_tokens": 6.25e-07,
|
|
"input_cost_per_image": 0.00032875,
|
|
"input_cost_per_image_above_128k_tokens": 0.0006575,
|
|
"input_cost_per_token": 7.8125e-08,
|
|
"input_cost_per_token_above_128k_tokens": 1.5625e-07,
|
|
"input_cost_per_video_per_second": 0.00032875,
|
|
"input_cost_per_video_per_second_above_128k_tokens": 0.0006575,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_input_tokens": 1000000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_character": 1.25e-06,
|
|
"output_cost_per_character_above_128k_tokens": 2.5e-06,
|
|
"output_cost_per_token": 3.125e-07,
|
|
"output_cost_per_token_above_128k_tokens": 6.25e-07,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"gemini-1.5-pro-preview-0409": {
|
|
"input_cost_per_audio_per_second": 3.125e-05,
|
|
"input_cost_per_audio_per_second_above_128k_tokens": 6.25e-05,
|
|
"input_cost_per_character": 3.125e-07,
|
|
"input_cost_per_character_above_128k_tokens": 6.25e-07,
|
|
"input_cost_per_image": 0.00032875,
|
|
"input_cost_per_image_above_128k_tokens": 0.0006575,
|
|
"input_cost_per_token": 7.8125e-08,
|
|
"input_cost_per_token_above_128k_tokens": 1.5625e-07,
|
|
"input_cost_per_video_per_second": 0.00032875,
|
|
"input_cost_per_video_per_second_above_128k_tokens": 0.0006575,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_input_tokens": 1000000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_character": 1.25e-06,
|
|
"output_cost_per_character_above_128k_tokens": 2.5e-06,
|
|
"output_cost_per_token": 3.125e-07,
|
|
"output_cost_per_token_above_128k_tokens": 6.25e-07,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"gemini-1.5-pro-preview-0514": {
|
|
"input_cost_per_audio_per_second": 3.125e-05,
|
|
"input_cost_per_audio_per_second_above_128k_tokens": 6.25e-05,
|
|
"input_cost_per_character": 3.125e-07,
|
|
"input_cost_per_character_above_128k_tokens": 6.25e-07,
|
|
"input_cost_per_image": 0.00032875,
|
|
"input_cost_per_image_above_128k_tokens": 0.0006575,
|
|
"input_cost_per_token": 7.8125e-08,
|
|
"input_cost_per_token_above_128k_tokens": 1.5625e-07,
|
|
"input_cost_per_video_per_second": 0.00032875,
|
|
"input_cost_per_video_per_second_above_128k_tokens": 0.0006575,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_input_tokens": 1000000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_character": 1.25e-06,
|
|
"output_cost_per_character_above_128k_tokens": 2.5e-06,
|
|
"output_cost_per_token": 3.125e-07,
|
|
"output_cost_per_token_above_128k_tokens": 6.25e-07,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"gemini-2.0-flash": {
|
|
"cache_read_input_token_cost": 2.5e-08,
|
|
"input_cost_per_audio_token": 7e-07,
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 8192,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 8192,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-07,
|
|
"source": "https://ai.google.dev/pricing#2_0flash",
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supports_audio_input": true,
|
|
"supports_audio_output": true,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_url_context": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true
|
|
},
|
|
"gemini-2.0-flash-001": {
|
|
"cache_read_input_token_cost": 3.75e-08,
|
|
"deprecation_date": "2026-02-05",
|
|
"input_cost_per_audio_token": 1e-06,
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 8192,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 8192,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-07,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing",
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supports_audio_output": true,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true
|
|
},
|
|
"gemini-2.0-flash-exp": {
|
|
"cache_read_input_token_cost": 3.75e-08,
|
|
"input_cost_per_audio_per_second": 0,
|
|
"input_cost_per_audio_per_second_above_128k_tokens": 0,
|
|
"input_cost_per_character": 0,
|
|
"input_cost_per_character_above_128k_tokens": 0,
|
|
"input_cost_per_image": 0,
|
|
"input_cost_per_image_above_128k_tokens": 0,
|
|
"input_cost_per_token": 1.5e-07,
|
|
"input_cost_per_token_above_128k_tokens": 0,
|
|
"input_cost_per_video_per_second": 0,
|
|
"input_cost_per_video_per_second_above_128k_tokens": 0,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 8192,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 8192,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_character": 0,
|
|
"output_cost_per_character_above_128k_tokens": 0,
|
|
"output_cost_per_token": 6e-07,
|
|
"output_cost_per_token_above_128k_tokens": 0,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing",
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supports_audio_output": true,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true
|
|
},
|
|
"gemini-2.0-flash-lite": {
|
|
"cache_read_input_token_cost": 1.875e-08,
|
|
"input_cost_per_audio_token": 7.5e-08,
|
|
"input_cost_per_token": 7.5e-08,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 8192,
|
|
"max_pdf_size_mb": 50,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-07,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-2.0-flash",
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_audio_output": true,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true
|
|
},
|
|
"gemini-2.0-flash-lite-001": {
|
|
"cache_read_input_token_cost": 1.875e-08,
|
|
"deprecation_date": "2026-02-25",
|
|
"input_cost_per_audio_token": 7.5e-08,
|
|
"input_cost_per_token": 7.5e-08,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 8192,
|
|
"max_pdf_size_mb": 50,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-07,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-2.0-flash",
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_audio_output": true,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true
|
|
},
|
|
"gemini-2.0-flash-live-preview-04-09": {
|
|
"cache_read_input_token_cost": 7.5e-08,
|
|
"input_cost_per_audio_token": 3e-06,
|
|
"input_cost_per_image": 3e-06,
|
|
"input_cost_per_token": 5e-07,
|
|
"input_cost_per_video_per_second": 3e-06,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 65535,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 65535,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_audio_token": 1.2e-05,
|
|
"output_cost_per_token": 2e-06,
|
|
"rpm": 10,
|
|
"source": "https://cloud.google.com/vertex-ai/docs/generative-ai/model-reference/gemini#gemini-2-0-flash-live-preview-04-09",
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/completions"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"audio"
|
|
],
|
|
"supports_audio_output": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_url_context": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true,
|
|
"tpm": 250000
|
|
},
|
|
"gemini-2.0-flash-preview-image-generation": {
|
|
"cache_read_input_token_cost": 2.5e-08,
|
|
"input_cost_per_audio_token": 7e-07,
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 8192,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 8192,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-07,
|
|
"source": "https://ai.google.dev/pricing#2_0flash",
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supports_audio_input": true,
|
|
"supports_audio_output": true,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true
|
|
},
|
|
"gemini-2.0-flash-thinking-exp": {
|
|
"cache_read_input_token_cost": 0.0,
|
|
"input_cost_per_audio_per_second": 0,
|
|
"input_cost_per_audio_per_second_above_128k_tokens": 0,
|
|
"input_cost_per_character": 0,
|
|
"input_cost_per_character_above_128k_tokens": 0,
|
|
"input_cost_per_image": 0,
|
|
"input_cost_per_image_above_128k_tokens": 0,
|
|
"input_cost_per_token": 0,
|
|
"input_cost_per_token_above_128k_tokens": 0,
|
|
"input_cost_per_video_per_second": 0,
|
|
"input_cost_per_video_per_second_above_128k_tokens": 0,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 8192,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 8192,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_character": 0,
|
|
"output_cost_per_character_above_128k_tokens": 0,
|
|
"output_cost_per_token": 0,
|
|
"output_cost_per_token_above_128k_tokens": 0,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-2.0-flash",
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supports_audio_output": true,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true
|
|
},
|
|
"gemini-2.0-flash-thinking-exp-01-21": {
|
|
"cache_read_input_token_cost": 0.0,
|
|
"input_cost_per_audio_per_second": 0,
|
|
"input_cost_per_audio_per_second_above_128k_tokens": 0,
|
|
"input_cost_per_character": 0,
|
|
"input_cost_per_character_above_128k_tokens": 0,
|
|
"input_cost_per_image": 0,
|
|
"input_cost_per_image_above_128k_tokens": 0,
|
|
"input_cost_per_token": 0,
|
|
"input_cost_per_token_above_128k_tokens": 0,
|
|
"input_cost_per_video_per_second": 0,
|
|
"input_cost_per_video_per_second_above_128k_tokens": 0,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 65536,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 65536,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_character": 0,
|
|
"output_cost_per_character_above_128k_tokens": 0,
|
|
"output_cost_per_token": 0,
|
|
"output_cost_per_token_above_128k_tokens": 0,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-2.0-flash",
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supports_audio_output": false,
|
|
"supports_function_calling": false,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": false,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true
|
|
},
|
|
"gemini-2.0-pro-exp-02-05": {
|
|
"cache_read_input_token_cost": 3.125e-07,
|
|
"input_cost_per_token": 1.25e-06,
|
|
"input_cost_per_token_above_200k_tokens": 2.5e-06,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 2097152,
|
|
"max_output_tokens": 8192,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 8192,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"output_cost_per_token_above_200k_tokens": 1.5e-05,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/completions"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_audio_input": true,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_video_input": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true
|
|
},
|
|
"gemini-2.5-flash": {
|
|
"cache_read_input_token_cost": 3e-08,
|
|
"input_cost_per_audio_token": 1e-06,
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 65535,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 65535,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_reasoning_token": 2.5e-06,
|
|
"output_cost_per_token": 2.5e-06,
|
|
"source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview",
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/completions",
|
|
"/v1/batch"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_audio_output": false,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_url_context": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true
|
|
},
|
|
"gemini-2.5-flash-image": {
|
|
"cache_read_input_token_cost": 3e-08,
|
|
"input_cost_per_audio_token": 1e-06,
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"max_pdf_size_mb": 30,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.039,
|
|
"output_cost_per_reasoning_token": 2.5e-06,
|
|
"output_cost_per_token": 2.5e-06,
|
|
"rpm": 100000,
|
|
"source": "https://ai.google.dev/gemini-api/docs/pricing#gemini-2.5-flash-image",
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/completions",
|
|
"/v1/batch"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supports_audio_output": false,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_url_context": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": false,
|
|
"tpm": 8000000
|
|
},
|
|
"gemini-2.5-flash-image-preview": {
|
|
"cache_read_input_token_cost": 7.5e-08,
|
|
"input_cost_per_audio_token": 1e-06,
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 65535,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 65535,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.039,
|
|
"output_cost_per_reasoning_token": 3e-05,
|
|
"output_cost_per_token": 3e-05,
|
|
"rpm": 100000,
|
|
"source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview",
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/completions",
|
|
"/v1/batch"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supports_audio_output": false,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_url_context": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true,
|
|
"tpm": 8000000
|
|
},
"gemini-3-pro-image-preview": {
"input_cost_per_image": 0.0011,
"input_cost_per_token": 2e-06,
"input_cost_per_token_batches": 1e-06,
"litellm_provider": "vertex_ai-language-models",
"max_input_tokens": 65536,
"max_output_tokens": 32768,
"max_tokens": 65536,
"mode": "image_generation",
"output_cost_per_image": 0.134,
"output_cost_per_image_token": 1.2e-04,
"output_cost_per_token": 1.2e-05,
"output_cost_per_token_batches": 6e-06,
"source": "https://ai.google.dev/gemini-api/docs/pricing",
"supported_endpoints": [
"/v1/chat/completions",
"/v1/completions",
"/v1/batch"
],
"supported_modalities": [
"text",
"image"
],
"supported_output_modalities": [
"text",
"image"
],
"supports_function_calling": false,
"supports_prompt_caching": true,
"supports_response_schema": true,
"supports_system_messages": true,
"supports_vision": true,
"supports_web_search": true
},
|
|
"gemini-2.5-flash-lite": {
|
|
"cache_read_input_token_cost": 2.5e-08,
|
|
"input_cost_per_audio_token": 5e-07,
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 65535,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 65535,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_reasoning_token": 4e-07,
|
|
"output_cost_per_token": 4e-07,
|
|
"source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview",
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/completions",
|
|
"/v1/batch"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_audio_output": false,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_url_context": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true
|
|
},
|
|
"gemini-2.5-flash-lite-preview-09-2025": {
|
|
"cache_read_input_token_cost": 2.5e-08,
|
|
"input_cost_per_audio_token": 3e-07,
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 65535,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 65535,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_reasoning_token": 4e-07,
|
|
"output_cost_per_token": 4e-07,
|
|
"source": "https://developers.googleblog.com/en/continuing-to-bring-you-our-latest-models-with-an-improved-gemini-2-5-flash-and-flash-lite-release/",
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/completions",
|
|
"/v1/batch"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_audio_output": false,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_url_context": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true
|
|
},
|
|
"gemini-2.5-flash-preview-09-2025": {
|
|
"cache_read_input_token_cost": 7.5e-08,
|
|
"input_cost_per_audio_token": 1e-06,
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 65535,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 65535,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_reasoning_token": 2.5e-06,
|
|
"output_cost_per_token": 2.5e-06,
|
|
"source": "https://developers.googleblog.com/en/continuing-to-bring-you-our-latest-models-with-an-improved-gemini-2-5-flash-and-flash-lite-release/",
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/completions",
|
|
"/v1/batch"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_audio_output": false,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_url_context": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true
|
|
},
|
|
"gemini-live-2.5-flash-preview-native-audio-09-2025": {
|
|
"cache_read_input_token_cost": 7.5e-08,
|
|
"input_cost_per_audio_token": 3e-06,
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 65535,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 65535,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_audio_token": 1.2e-05,
|
|
"output_cost_per_token": 2e-06,
|
|
"source": "https://ai.google.dev/gemini-api/docs/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/completions"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"audio"
|
|
],
|
|
"supports_audio_input": true,
|
|
"supports_audio_output": true,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_url_context": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true
|
|
},
|
|
"gemini/gemini-live-2.5-flash-preview-native-audio-09-2025": {
|
|
"cache_read_input_token_cost": 7.5e-08,
|
|
"input_cost_per_audio_token": 3e-06,
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "gemini",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 65535,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 65535,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_audio_token": 1.2e-05,
|
|
"output_cost_per_token": 2e-06,
|
|
"rpm": 100000,
|
|
"source": "https://ai.google.dev/gemini-api/docs/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/completions"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"audio"
|
|
],
|
|
"supports_audio_input": true,
|
|
"supports_audio_output": true,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_url_context": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true,
|
|
"tpm": 8000000
|
|
},
|
|
"gemini-2.5-flash-lite-preview-06-17": {
|
|
"cache_read_input_token_cost": 2.5e-08,
|
|
"input_cost_per_audio_token": 5e-07,
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 65535,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 65535,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_reasoning_token": 4e-07,
|
|
"output_cost_per_token": 4e-07,
|
|
"source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview",
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/completions",
|
|
"/v1/batch"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_audio_output": false,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_url_context": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true
|
|
},
|
|
"gemini-2.5-flash-preview-04-17": {
|
|
"cache_read_input_token_cost": 3.75e-08,
|
|
"input_cost_per_audio_token": 1e-06,
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 65535,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 65535,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_reasoning_token": 3.5e-06,
|
|
"output_cost_per_token": 6e-07,
|
|
"source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview",
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/completions",
|
|
"/v1/batch"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_audio_output": false,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true
|
|
},
|
|
"gemini-2.5-flash-preview-05-20": {
|
|
"cache_read_input_token_cost": 7.5e-08,
|
|
"input_cost_per_audio_token": 1e-06,
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 65535,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 65535,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_reasoning_token": 2.5e-06,
|
|
"output_cost_per_token": 2.5e-06,
|
|
"source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview",
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/completions",
|
|
"/v1/batch"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_audio_output": false,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_url_context": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true
|
|
},
|
|
"gemini-2.5-pro": {
|
|
"cache_read_input_token_cost": 1.25e-07,
|
|
"cache_creation_input_token_cost_above_200k_tokens": 2.5e-07,
|
|
"input_cost_per_token": 1.25e-06,
|
|
"input_cost_per_token_above_200k_tokens": 2.5e-06,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 65535,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 65535,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"output_cost_per_token_above_200k_tokens": 1.5e-05,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/completions"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_audio_input": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_video_input": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true
|
|
},
|
|
"gemini-3-pro-preview": {
|
|
"cache_read_input_token_cost": 2e-07,
|
|
"cache_read_input_token_cost_above_200k_tokens": 4e-07,
|
|
"cache_creation_input_token_cost_above_200k_tokens": 2.5e-07,
|
|
"input_cost_per_token": 2e-06,
|
|
"input_cost_per_token_above_200k_tokens": 4e-06,
|
|
"input_cost_per_token_batches": 1e-06,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 65535,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 65535,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.2e-05,
|
|
"output_cost_per_token_above_200k_tokens": 1.8e-05,
|
|
"output_cost_per_token_batches": 6e-06,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/completions",
|
|
"/v1/batch"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_audio_input": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_video_input": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true
|
|
},
|
|
"vertex_ai/gemini-3-pro-preview": {
|
|
"cache_read_input_token_cost": 2e-07,
|
|
"cache_read_input_token_cost_above_200k_tokens": 4e-07,
|
|
"cache_creation_input_token_cost_above_200k_tokens": 2.5e-07,
|
|
"input_cost_per_token": 2e-06,
|
|
"input_cost_per_token_above_200k_tokens": 4e-06,
|
|
"input_cost_per_token_batches": 1e-06,
|
|
"litellm_provider": "vertex_ai",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 65535,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 65535,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.2e-05,
|
|
"output_cost_per_token_above_200k_tokens": 1.8e-05,
|
|
"output_cost_per_token_batches": 6e-06,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/completions",
|
|
"/v1/batch"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_audio_input": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_video_input": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true
|
|
},
|
|
"gemini-2.5-pro-exp-03-25": {
|
|
"cache_read_input_token_cost": 3.125e-07,
|
|
"input_cost_per_token": 1.25e-06,
|
|
"input_cost_per_token_above_200k_tokens": 2.5e-06,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 65535,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 65535,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"output_cost_per_token_above_200k_tokens": 1.5e-05,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/completions"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_audio_input": true,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_video_input": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true
|
|
},
|
|
"gemini-2.5-pro-preview-03-25": {
|
|
"cache_read_input_token_cost": 3.125e-07,
|
|
"input_cost_per_audio_token": 1.25e-06,
|
|
"input_cost_per_token": 1.25e-06,
|
|
"input_cost_per_token_above_200k_tokens": 2.5e-06,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 65535,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 65535,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"output_cost_per_token_above_200k_tokens": 1.5e-05,
|
|
"source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview",
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/completions",
|
|
"/v1/batch"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_audio_output": false,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true
|
|
},
|
|
"gemini-2.5-pro-preview-05-06": {
|
|
"cache_read_input_token_cost": 3.125e-07,
|
|
"input_cost_per_audio_token": 1.25e-06,
|
|
"input_cost_per_token": 1.25e-06,
|
|
"input_cost_per_token_above_200k_tokens": 2.5e-06,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 65535,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 65535,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"output_cost_per_token_above_200k_tokens": 1.5e-05,
|
|
"source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview",
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/completions",
|
|
"/v1/batch"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supported_regions": [
|
|
"global"
|
|
],
|
|
"supports_audio_output": false,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true
|
|
},
|
|
"gemini-2.5-pro-preview-06-05": {
|
|
"cache_read_input_token_cost": 3.125e-07,
|
|
"input_cost_per_audio_token": 1.25e-06,
|
|
"input_cost_per_token": 1.25e-06,
|
|
"input_cost_per_token_above_200k_tokens": 2.5e-06,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 65535,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 65535,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"output_cost_per_token_above_200k_tokens": 1.5e-05,
|
|
"source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview",
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/completions",
|
|
"/v1/batch"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_audio_output": false,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true
|
|
},
|
|
"gemini-2.5-pro-preview-tts": {
|
|
"cache_read_input_token_cost": 3.125e-07,
|
|
"input_cost_per_audio_token": 7e-07,
|
|
"input_cost_per_token": 1.25e-06,
|
|
"input_cost_per_token_above_200k_tokens": 2.5e-06,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 65535,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 65535,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"output_cost_per_token_above_200k_tokens": 1.5e-05,
|
|
"source": "https://ai.google.dev/gemini-api/docs/pricing#gemini-2.5-pro-preview",
|
|
"supported_modalities": [
|
|
"text"
|
|
],
|
|
"supported_output_modalities": [
|
|
"audio"
|
|
],
|
|
"supports_audio_output": false,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true
|
|
},
|
|
"gemini-embedding-001": {
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "vertex_ai-embedding-models",
|
|
"max_input_tokens": 2048,
|
|
"max_tokens": 2048,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0,
|
|
"output_vector_size": 3072,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models"
|
|
},
|
|
"gemini-flash-experimental": {
|
|
"input_cost_per_character": 0,
|
|
"input_cost_per_token": 0,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_input_tokens": 1000000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_character": 0,
|
|
"output_cost_per_token": 0,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/gemini-experimental",
|
|
"supports_function_calling": false,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"gemini-pro": {
|
|
"input_cost_per_character": 1.25e-07,
|
|
"input_cost_per_image": 0.0025,
|
|
"input_cost_per_token": 5e-07,
|
|
"input_cost_per_video_per_second": 0.002,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_input_tokens": 32760,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_character": 3.75e-07,
|
|
"output_cost_per_token": 1.5e-06,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"gemini-pro-experimental": {
|
|
"input_cost_per_character": 0,
|
|
"input_cost_per_token": 0,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_input_tokens": 1000000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_character": 0,
|
|
"output_cost_per_token": 0,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/gemini-experimental",
|
|
"supports_function_calling": false,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"gemini-pro-vision": {
|
|
"input_cost_per_image": 0.0025,
|
|
"input_cost_per_token": 5e-07,
|
|
"litellm_provider": "vertex_ai-vision-models",
|
|
"max_images_per_prompt": 16,
|
|
"max_input_tokens": 16384,
|
|
"max_output_tokens": 2048,
|
|
"max_tokens": 2048,
|
|
"max_video_length": 2,
|
|
"max_videos_per_prompt": 1,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-06,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"gemini/gemini-embedding-001": {
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "gemini",
|
|
"max_input_tokens": 2048,
|
|
"max_tokens": 2048,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0,
|
|
"output_vector_size": 3072,
|
|
"rpm": 10000,
|
|
"source": "https://ai.google.dev/gemini-api/docs/embeddings#model-versions",
|
|
"tpm": 10000000
|
|
},
|
|
"gemini/gemini-1.5-flash": {
|
|
"input_cost_per_token": 7.5e-08,
|
|
"input_cost_per_token_above_128k_tokens": 1.5e-07,
|
|
"litellm_provider": "gemini",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 8192,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 8192,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-07,
|
|
"output_cost_per_token_above_128k_tokens": 6e-07,
|
|
"rpm": 2000,
|
|
"source": "https://ai.google.dev/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tpm": 4000000
|
|
},
|
|
"gemini/gemini-1.5-flash-001": {
|
|
"cache_creation_input_token_cost": 1e-06,
|
|
"cache_read_input_token_cost": 1.875e-08,
|
|
"deprecation_date": "2025-05-24",
|
|
"input_cost_per_token": 7.5e-08,
|
|
"input_cost_per_token_above_128k_tokens": 1.5e-07,
|
|
"litellm_provider": "gemini",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 8192,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 8192,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-07,
|
|
"output_cost_per_token_above_128k_tokens": 6e-07,
|
|
"rpm": 2000,
|
|
"source": "https://ai.google.dev/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tpm": 4000000
|
|
},
|
|
"gemini/gemini-1.5-flash-002": {
|
|
"cache_creation_input_token_cost": 1e-06,
|
|
"cache_read_input_token_cost": 1.875e-08,
|
|
"deprecation_date": "2025-09-24",
|
|
"input_cost_per_token": 7.5e-08,
|
|
"input_cost_per_token_above_128k_tokens": 1.5e-07,
|
|
"litellm_provider": "gemini",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 8192,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 8192,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-07,
|
|
"output_cost_per_token_above_128k_tokens": 6e-07,
|
|
"rpm": 2000,
|
|
"source": "https://ai.google.dev/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tpm": 4000000
|
|
},
|
|
"gemini/gemini-1.5-flash-8b": {
|
|
"input_cost_per_token": 0,
|
|
"input_cost_per_token_above_128k_tokens": 0,
|
|
"litellm_provider": "gemini",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 8192,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 8192,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0,
|
|
"output_cost_per_token_above_128k_tokens": 0,
|
|
"rpm": 4000,
|
|
"source": "https://ai.google.dev/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tpm": 4000000
|
|
},
|
|
"gemini/gemini-1.5-flash-8b-exp-0827": {
|
|
"input_cost_per_token": 0,
|
|
"input_cost_per_token_above_128k_tokens": 0,
|
|
"litellm_provider": "gemini",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1000000,
|
|
"max_output_tokens": 8192,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 8192,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0,
|
|
"output_cost_per_token_above_128k_tokens": 0,
|
|
"rpm": 4000,
|
|
"source": "https://ai.google.dev/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tpm": 4000000
|
|
},
|
|
"gemini/gemini-1.5-flash-8b-exp-0924": {
|
|
"input_cost_per_token": 0,
|
|
"input_cost_per_token_above_128k_tokens": 0,
|
|
"litellm_provider": "gemini",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 8192,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 8192,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0,
|
|
"output_cost_per_token_above_128k_tokens": 0,
|
|
"rpm": 4000,
|
|
"source": "https://ai.google.dev/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tpm": 4000000
|
|
},
|
|
"gemini/gemini-1.5-flash-exp-0827": {
|
|
"input_cost_per_token": 0,
|
|
"input_cost_per_token_above_128k_tokens": 0,
|
|
"litellm_provider": "gemini",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 8192,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 8192,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0,
|
|
"output_cost_per_token_above_128k_tokens": 0,
|
|
"rpm": 2000,
|
|
"source": "https://ai.google.dev/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tpm": 4000000
|
|
},
|
|
"gemini/gemini-1.5-flash-latest": {
|
|
"input_cost_per_token": 7.5e-08,
|
|
"input_cost_per_token_above_128k_tokens": 1.5e-07,
|
|
"litellm_provider": "gemini",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 8192,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 8192,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-07,
|
|
"output_cost_per_token_above_128k_tokens": 6e-07,
|
|
"rpm": 2000,
|
|
"source": "https://ai.google.dev/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tpm": 4000000
|
|
},
|
|
"gemini/gemini-1.5-pro": {
|
|
"input_cost_per_token": 3.5e-06,
|
|
"input_cost_per_token_above_128k_tokens": 7e-06,
|
|
"litellm_provider": "gemini",
|
|
"max_input_tokens": 2097152,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.05e-05,
|
|
"output_cost_per_token_above_128k_tokens": 2.1e-05,
|
|
"rpm": 1000,
|
|
"source": "https://ai.google.dev/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tpm": 4000000
|
|
},
|
|
"gemini/gemini-1.5-pro-001": {
|
|
"deprecation_date": "2025-05-24",
|
|
"input_cost_per_token": 3.5e-06,
|
|
"input_cost_per_token_above_128k_tokens": 7e-06,
|
|
"litellm_provider": "gemini",
|
|
"max_input_tokens": 2097152,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.05e-05,
|
|
"output_cost_per_token_above_128k_tokens": 2.1e-05,
|
|
"rpm": 1000,
|
|
"source": "https://ai.google.dev/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tpm": 4000000
|
|
},
|
|
"gemini/gemini-1.5-pro-002": {
|
|
"deprecation_date": "2025-09-24",
|
|
"input_cost_per_token": 3.5e-06,
|
|
"input_cost_per_token_above_128k_tokens": 7e-06,
|
|
"litellm_provider": "gemini",
|
|
"max_input_tokens": 2097152,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.05e-05,
|
|
"output_cost_per_token_above_128k_tokens": 2.1e-05,
|
|
"rpm": 1000,
|
|
"source": "https://ai.google.dev/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tpm": 4000000
|
|
},
|
|
"gemini/gemini-1.5-pro-exp-0801": {
|
|
"input_cost_per_token": 3.5e-06,
|
|
"input_cost_per_token_above_128k_tokens": 7e-06,
|
|
"litellm_provider": "gemini",
|
|
"max_input_tokens": 2097152,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.05e-05,
|
|
"output_cost_per_token_above_128k_tokens": 2.1e-05,
|
|
"rpm": 1000,
|
|
"source": "https://ai.google.dev/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tpm": 4000000
|
|
},
|
|
"gemini/gemini-1.5-pro-exp-0827": {
|
|
"input_cost_per_token": 0,
|
|
"input_cost_per_token_above_128k_tokens": 0,
|
|
"litellm_provider": "gemini",
|
|
"max_input_tokens": 2097152,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0,
|
|
"output_cost_per_token_above_128k_tokens": 0,
|
|
"rpm": 1000,
|
|
"source": "https://ai.google.dev/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tpm": 4000000
|
|
},
|
|
"gemini/gemini-1.5-pro-latest": {
|
|
"input_cost_per_token": 3.5e-06,
|
|
"input_cost_per_token_above_128k_tokens": 7e-06,
|
|
"litellm_provider": "gemini",
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.05e-06,
|
|
"output_cost_per_token_above_128k_tokens": 2.1e-05,
|
|
"rpm": 1000,
|
|
"source": "https://ai.google.dev/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tpm": 4000000
|
|
},
|
|
"gemini/gemini-2.0-flash": {
|
|
"cache_read_input_token_cost": 2.5e-08,
|
|
"input_cost_per_audio_token": 7e-07,
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "gemini",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 8192,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 8192,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-07,
|
|
"rpm": 10000,
|
|
"source": "https://ai.google.dev/pricing#2_0flash",
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supports_audio_input": true,
|
|
"supports_audio_output": true,
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_url_context": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true,
|
|
"tpm": 10000000
|
|
},
|
|
"gemini/gemini-2.0-flash-001": {
|
|
"cache_read_input_token_cost": 2.5e-08,
|
|
"input_cost_per_audio_token": 7e-07,
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "gemini",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 8192,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 8192,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-07,
|
|
"rpm": 10000,
|
|
"source": "https://ai.google.dev/pricing#2_0flash",
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supports_audio_output": false,
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true,
|
|
"tpm": 10000000
|
|
},
|
|
"gemini/gemini-2.0-flash-exp": {
|
|
"cache_read_input_token_cost": 0.0,
|
|
"input_cost_per_audio_per_second": 0,
|
|
"input_cost_per_audio_per_second_above_128k_tokens": 0,
|
|
"input_cost_per_character": 0,
|
|
"input_cost_per_character_above_128k_tokens": 0,
|
|
"input_cost_per_image": 0,
|
|
"input_cost_per_image_above_128k_tokens": 0,
|
|
"input_cost_per_token": 0,
|
|
"input_cost_per_token_above_128k_tokens": 0,
|
|
"input_cost_per_video_per_second": 0,
|
|
"input_cost_per_video_per_second_above_128k_tokens": 0,
|
|
"litellm_provider": "gemini",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 8192,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 8192,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_character": 0,
|
|
"output_cost_per_character_above_128k_tokens": 0,
|
|
"output_cost_per_token": 0,
|
|
"output_cost_per_token_above_128k_tokens": 0,
|
|
"rpm": 10,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-2.0-flash",
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supports_audio_output": true,
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true,
|
|
"tpm": 4000000
|
|
},
|
|
"gemini/gemini-2.0-flash-lite": {
|
|
"cache_read_input_token_cost": 1.875e-08,
|
|
"input_cost_per_audio_token": 7.5e-08,
|
|
"input_cost_per_token": 7.5e-08,
|
|
"litellm_provider": "gemini",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 8192,
|
|
"max_pdf_size_mb": 50,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-07,
|
|
"rpm": 4000,
|
|
"source": "https://ai.google.dev/gemini-api/docs/pricing#gemini-2.0-flash-lite",
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_audio_output": true,
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true,
|
|
"tpm": 4000000
|
|
},
|
|
"gemini/gemini-2.0-flash-lite-preview-02-05": {
|
|
"cache_read_input_token_cost": 1.875e-08,
|
|
"input_cost_per_audio_token": 7.5e-08,
|
|
"input_cost_per_token": 7.5e-08,
|
|
"litellm_provider": "gemini",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 8192,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 8192,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-07,
|
|
"rpm": 60000,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-2.0-flash-lite",
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_audio_output": false,
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true,
|
|
"tpm": 10000000
|
|
},
|
|
"gemini/gemini-2.0-flash-live-001": {
|
|
"cache_read_input_token_cost": 7.5e-08,
|
|
"input_cost_per_audio_token": 2.1e-06,
|
|
"input_cost_per_image": 2.1e-06,
|
|
"input_cost_per_token": 3.5e-07,
|
|
"input_cost_per_video_per_second": 2.1e-06,
|
|
"litellm_provider": "gemini",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 65535,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 65535,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_audio_token": 8.5e-06,
|
|
"output_cost_per_token": 1.5e-06,
|
|
"rpm": 10,
|
|
"source": "https://ai.google.dev/gemini-api/docs/models#gemini-2-0-flash-live-001",
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/completions"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"audio"
|
|
],
|
|
"supports_audio_output": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_url_context": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true,
|
|
"tpm": 250000
|
|
},
|
|
"gemini/gemini-2.0-flash-preview-image-generation": {
|
|
"cache_read_input_token_cost": 2.5e-08,
|
|
"input_cost_per_audio_token": 7e-07,
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "gemini",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 8192,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 8192,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-07,
|
|
"rpm": 10000,
|
|
"source": "https://ai.google.dev/pricing#2_0flash",
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supports_audio_input": true,
|
|
"supports_audio_output": true,
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true,
|
|
"tpm": 10000000
|
|
},
|
|
"gemini/gemini-2.0-flash-thinking-exp": {
|
|
"cache_read_input_token_cost": 0.0,
|
|
"input_cost_per_audio_per_second": 0,
|
|
"input_cost_per_audio_per_second_above_128k_tokens": 0,
|
|
"input_cost_per_character": 0,
|
|
"input_cost_per_character_above_128k_tokens": 0,
|
|
"input_cost_per_image": 0,
|
|
"input_cost_per_image_above_128k_tokens": 0,
|
|
"input_cost_per_token": 0,
|
|
"input_cost_per_token_above_128k_tokens": 0,
|
|
"input_cost_per_video_per_second": 0,
|
|
"input_cost_per_video_per_second_above_128k_tokens": 0,
|
|
"litellm_provider": "gemini",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 65536,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 8192,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_character": 0,
|
|
"output_cost_per_character_above_128k_tokens": 0,
|
|
"output_cost_per_token": 0,
|
|
"output_cost_per_token_above_128k_tokens": 0,
|
|
"rpm": 10,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-2.0-flash",
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supports_audio_output": true,
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true,
|
|
"tpm": 4000000
|
|
},
|
|
"gemini/gemini-2.0-flash-thinking-exp-01-21": {
|
|
"cache_read_input_token_cost": 0.0,
|
|
"input_cost_per_audio_per_second": 0,
|
|
"input_cost_per_audio_per_second_above_128k_tokens": 0,
|
|
"input_cost_per_character": 0,
|
|
"input_cost_per_character_above_128k_tokens": 0,
|
|
"input_cost_per_image": 0,
|
|
"input_cost_per_image_above_128k_tokens": 0,
|
|
"input_cost_per_token": 0,
|
|
"input_cost_per_token_above_128k_tokens": 0,
|
|
"input_cost_per_video_per_second": 0,
|
|
"input_cost_per_video_per_second_above_128k_tokens": 0,
|
|
"litellm_provider": "gemini",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 65536,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 8192,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_character": 0,
|
|
"output_cost_per_character_above_128k_tokens": 0,
|
|
"output_cost_per_token": 0,
|
|
"output_cost_per_token_above_128k_tokens": 0,
|
|
"rpm": 10,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-2.0-flash",
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supports_audio_output": true,
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true,
|
|
"tpm": 4000000
|
|
},
|
|
"gemini/gemini-2.0-pro-exp-02-05": {
|
|
"cache_read_input_token_cost": 0.0,
|
|
"input_cost_per_audio_per_second": 0,
|
|
"input_cost_per_audio_per_second_above_128k_tokens": 0,
|
|
"input_cost_per_character": 0,
|
|
"input_cost_per_character_above_128k_tokens": 0,
|
|
"input_cost_per_image": 0,
|
|
"input_cost_per_image_above_128k_tokens": 0,
|
|
"input_cost_per_token": 0,
|
|
"input_cost_per_token_above_128k_tokens": 0,
|
|
"input_cost_per_video_per_second": 0,
|
|
"input_cost_per_video_per_second_above_128k_tokens": 0,
|
|
"litellm_provider": "gemini",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 2097152,
|
|
"max_output_tokens": 8192,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 8192,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_character": 0,
|
|
"output_cost_per_character_above_128k_tokens": 0,
|
|
"output_cost_per_token": 0,
|
|
"output_cost_per_token_above_128k_tokens": 0,
|
|
"rpm": 2,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing",
|
|
"supports_audio_input": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_video_input": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true,
|
|
"tpm": 1000000
|
|
},
|
|
"gemini/gemini-2.5-flash": {
|
|
"cache_read_input_token_cost": 3e-08,
|
|
"input_cost_per_audio_token": 1e-06,
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "gemini",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 65535,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 65535,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_reasoning_token": 2.5e-06,
|
|
"output_cost_per_token": 2.5e-06,
|
|
"rpm": 100000,
|
|
"source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview",
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/completions",
|
|
"/v1/batch"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_audio_output": false,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_url_context": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true,
|
|
"tpm": 8000000
|
|
},
|
|
"gemini/gemini-2.5-flash-image": {
|
|
"cache_read_input_token_cost": 3e-08,
|
|
"input_cost_per_audio_token": 1e-06,
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"supports_reasoning": false,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"max_pdf_size_mb": 30,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.039,
|
|
"output_cost_per_reasoning_token": 2.5e-06,
|
|
"output_cost_per_token": 2.5e-06,
|
|
"rpm": 100000,
|
|
"source": "https://ai.google.dev/gemini-api/docs/pricing#gemini-2.5-flash-image",
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/completions",
|
|
"/v1/batch"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supports_audio_output": false,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_url_context": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true,
|
|
"tpm": 8000000
|
|
},
|
|
"gemini/gemini-2.5-flash-image-preview": {
|
|
"cache_read_input_token_cost": 7.5e-08,
|
|
"input_cost_per_audio_token": 1e-06,
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "gemini",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 65535,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 65535,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.039,
|
|
"output_cost_per_reasoning_token": 3e-05,
|
|
"output_cost_per_token": 3e-05,
|
|
"rpm": 100000,
|
|
"source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview",
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/completions",
|
|
"/v1/batch"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supports_audio_output": false,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_url_context": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true,
|
|
"tpm": 8000000
|
|
},
|
|
"gemini/gemini-3-pro-image-preview": {
|
|
"input_cost_per_image": 0.0011,
|
|
"input_cost_per_token": 2e-06,
|
|
"input_cost_per_token_batches": 1e-06,
|
|
"litellm_provider": "gemini",
|
|
"max_input_tokens": 65536,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 65536,
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.134,
|
|
"output_cost_per_image_token": 1.2e-04,
|
|
"output_cost_per_token": 1.2e-05,
|
|
"rpm": 1000,
|
|
"tpm": 4000000,
|
|
"output_cost_per_token_batches": 6e-06,
|
|
"source": "https://ai.google.dev/gemini-api/docs/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/completions",
|
|
"/v1/batch"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supports_function_calling": false,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true
|
|
},
|
|
"gemini/gemini-2.5-flash-lite": {
|
|
"cache_read_input_token_cost": 2.5e-08,
|
|
"input_cost_per_audio_token": 5e-07,
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "gemini",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 65535,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 65535,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_reasoning_token": 4e-07,
|
|
"output_cost_per_token": 4e-07,
|
|
"rpm": 15,
|
|
"source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-lite",
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/completions",
|
|
"/v1/batch"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_audio_output": false,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_url_context": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true,
|
|
"tpm": 250000
|
|
},
|
|
"gemini/gemini-2.5-flash-lite-preview-09-2025": {
|
|
"cache_read_input_token_cost": 2.5e-08,
|
|
"input_cost_per_audio_token": 3e-07,
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "gemini",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 65535,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 65535,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_reasoning_token": 4e-07,
|
|
"output_cost_per_token": 4e-07,
|
|
"rpm": 15,
|
|
"source": "https://developers.googleblog.com/en/continuing-to-bring-you-our-latest-models-with-an-improved-gemini-2-5-flash-and-flash-lite-release/",
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/completions",
|
|
"/v1/batch"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_audio_output": false,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_url_context": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true,
|
|
"tpm": 250000
|
|
},
|
|
"gemini/gemini-2.5-flash-preview-09-2025": {
|
|
"cache_read_input_token_cost": 7.5e-08,
|
|
"input_cost_per_audio_token": 1e-06,
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "gemini",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 65535,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 65535,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_reasoning_token": 2.5e-06,
|
|
"output_cost_per_token": 2.5e-06,
|
|
"rpm": 15,
|
|
"source": "https://developers.googleblog.com/en/continuing-to-bring-you-our-latest-models-with-an-improved-gemini-2-5-flash-and-flash-lite-release/",
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/completions",
|
|
"/v1/batch"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_audio_output": false,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_url_context": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true,
|
|
"tpm": 250000
|
|
},
|
|
"gemini/gemini-flash-latest": {
|
|
"cache_read_input_token_cost": 7.5e-08,
|
|
"input_cost_per_audio_token": 1e-06,
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "gemini",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 65535,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 65535,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_reasoning_token": 2.5e-06,
|
|
"output_cost_per_token": 2.5e-06,
|
|
"rpm": 15,
|
|
"source": "https://developers.googleblog.com/en/continuing-to-bring-you-our-latest-models-with-an-improved-gemini-2-5-flash-and-flash-lite-release/",
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/completions",
|
|
"/v1/batch"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_audio_output": false,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_url_context": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true,
|
|
"tpm": 250000
|
|
},
|
|
"gemini/gemini-flash-lite-latest": {
|
|
"cache_read_input_token_cost": 2.5e-08,
|
|
"input_cost_per_audio_token": 3e-07,
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "gemini",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 65535,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 65535,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_reasoning_token": 4e-07,
|
|
"output_cost_per_token": 4e-07,
|
|
"rpm": 15,
|
|
"source": "https://developers.googleblog.com/en/continuing-to-bring-you-our-latest-models-with-an-improved-gemini-2-5-flash-and-flash-lite-release/",
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/completions",
|
|
"/v1/batch"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_audio_output": false,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_url_context": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true,
|
|
"tpm": 250000
|
|
},
|
|
"gemini/gemini-2.5-flash-lite-preview-06-17": {
|
|
"cache_read_input_token_cost": 2.5e-08,
|
|
"input_cost_per_audio_token": 5e-07,
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "gemini",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 65535,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 65535,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_reasoning_token": 4e-07,
|
|
"output_cost_per_token": 4e-07,
|
|
"rpm": 15,
|
|
"source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-lite",
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/completions",
|
|
"/v1/batch"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_audio_output": false,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_url_context": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true,
|
|
"tpm": 250000
|
|
},
|
|
"gemini/gemini-2.5-flash-preview-04-17": {
|
|
"cache_read_input_token_cost": 3.75e-08,
|
|
"input_cost_per_audio_token": 1e-06,
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "gemini",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 65535,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 65535,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_reasoning_token": 3.5e-06,
|
|
"output_cost_per_token": 6e-07,
|
|
"rpm": 10,
|
|
"source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview",
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/completions"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_audio_output": false,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true,
|
|
"tpm": 250000
|
|
},
|
|
"gemini/gemini-2.5-flash-preview-05-20": {
|
|
"cache_read_input_token_cost": 7.5e-08,
|
|
"input_cost_per_audio_token": 1e-06,
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "gemini",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 65535,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 65535,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_reasoning_token": 2.5e-06,
|
|
"output_cost_per_token": 2.5e-06,
|
|
"rpm": 10,
|
|
"source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview",
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/completions"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_audio_output": false,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_url_context": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true,
|
|
"tpm": 250000
|
|
},
|
|
"gemini/gemini-2.5-flash-preview-tts": {
|
|
"cache_read_input_token_cost": 3.75e-08,
|
|
"input_cost_per_audio_token": 1e-06,
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "gemini",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 65535,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 65535,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_reasoning_token": 3.5e-06,
|
|
"output_cost_per_token": 6e-07,
|
|
"rpm": 10,
|
|
"source": "https://ai.google.dev/gemini-api/docs/models#gemini-2.5-flash-preview",
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/completions"
|
|
],
|
|
"supported_modalities": [
|
|
"text"
|
|
],
|
|
"supported_output_modalities": [
|
|
"audio"
|
|
],
|
|
"supports_audio_output": false,
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true,
|
|
"tpm": 250000
|
|
},
|
|
"gemini/gemini-2.5-pro": {
|
|
"cache_read_input_token_cost": 3.125e-07,
|
|
"input_cost_per_token": 1.25e-06,
|
|
"input_cost_per_token_above_200k_tokens": 2.5e-06,
|
|
"litellm_provider": "gemini",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 65535,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 65535,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"output_cost_per_token_above_200k_tokens": 1.5e-05,
|
|
"rpm": 2000,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/completions"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_audio_input": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_video_input": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true,
|
|
"tpm": 800000
|
|
},
|
|
"gemini/gemini-3-pro-preview": {
|
|
"cache_read_input_token_cost": 2e-07,
|
|
"cache_read_input_token_cost_above_200k_tokens": 4e-07,
|
|
"input_cost_per_token": 2e-06,
|
|
"input_cost_per_token_above_200k_tokens": 4e-06,
|
|
"input_cost_per_token_batches": 1e-06,
|
|
"litellm_provider": "gemini",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 65535,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 65535,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.2e-05,
|
|
"output_cost_per_token_above_200k_tokens": 1.8e-05,
|
|
"output_cost_per_token_batches": 6e-06,
|
|
"rpm": 2000,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/completions",
|
|
"/v1/batch"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_audio_input": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_video_input": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true,
|
|
"tpm": 800000
|
|
},
|
|
"gemini/gemini-2.5-pro-exp-03-25": {
|
|
"cache_read_input_token_cost": 0.0,
|
|
"input_cost_per_token": 0.0,
|
|
"input_cost_per_token_above_200k_tokens": 0.0,
|
|
"litellm_provider": "gemini",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 65535,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 65535,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0,
|
|
"output_cost_per_token_above_200k_tokens": 0.0,
|
|
"rpm": 5,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing",
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/completions"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_audio_input": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_video_input": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true,
|
|
"tpm": 250000
|
|
},
|
|
"gemini/gemini-2.5-pro-preview-03-25": {
|
|
"cache_read_input_token_cost": 3.125e-07,
|
|
"input_cost_per_audio_token": 7e-07,
|
|
"input_cost_per_token": 1.25e-06,
|
|
"input_cost_per_token_above_200k_tokens": 2.5e-06,
|
|
"litellm_provider": "gemini",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 65535,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 65535,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"output_cost_per_token_above_200k_tokens": 1.5e-05,
|
|
"rpm": 10000,
|
|
"source": "https://ai.google.dev/gemini-api/docs/pricing#gemini-2.5-pro-preview",
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_audio_output": false,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true,
|
|
"tpm": 10000000
|
|
},
|
|
"gemini/gemini-2.5-pro-preview-05-06": {
|
|
"cache_read_input_token_cost": 3.125e-07,
|
|
"input_cost_per_audio_token": 7e-07,
|
|
"input_cost_per_token": 1.25e-06,
|
|
"input_cost_per_token_above_200k_tokens": 2.5e-06,
|
|
"litellm_provider": "gemini",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 65535,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 65535,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"output_cost_per_token_above_200k_tokens": 1.5e-05,
|
|
"rpm": 10000,
|
|
"source": "https://ai.google.dev/gemini-api/docs/pricing#gemini-2.5-pro-preview",
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_audio_output": false,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_url_context": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true,
|
|
"tpm": 10000000
|
|
},
|
|
"gemini/gemini-2.5-pro-preview-06-05": {
|
|
"cache_read_input_token_cost": 3.125e-07,
|
|
"input_cost_per_audio_token": 7e-07,
|
|
"input_cost_per_token": 1.25e-06,
|
|
"input_cost_per_token_above_200k_tokens": 2.5e-06,
|
|
"litellm_provider": "gemini",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 65535,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 65535,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"output_cost_per_token_above_200k_tokens": 1.5e-05,
|
|
"rpm": 10000,
|
|
"source": "https://ai.google.dev/gemini-api/docs/pricing#gemini-2.5-pro-preview",
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_audio_output": false,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_url_context": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true,
|
|
"tpm": 10000000
|
|
},
|
|
"gemini/gemini-2.5-pro-preview-tts": {
|
|
"cache_read_input_token_cost": 3.125e-07,
|
|
"input_cost_per_audio_token": 7e-07,
|
|
"input_cost_per_token": 1.25e-06,
|
|
"input_cost_per_token_above_200k_tokens": 2.5e-06,
|
|
"litellm_provider": "gemini",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 65535,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 65535,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"output_cost_per_token_above_200k_tokens": 1.5e-05,
|
|
"rpm": 10000,
|
|
"source": "https://ai.google.dev/gemini-api/docs/pricing#gemini-2.5-pro-preview",
|
|
"supported_modalities": [
|
|
"text"
|
|
],
|
|
"supported_output_modalities": [
|
|
"audio"
|
|
],
|
|
"supports_audio_output": false,
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true,
|
|
"tpm": 10000000
|
|
},
|
|
"gemini/gemini-exp-1114": {
|
|
"input_cost_per_token": 0,
|
|
"input_cost_per_token_above_128k_tokens": 0,
|
|
"litellm_provider": "gemini",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 8192,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 8192,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"metadata": {
|
|
"notes": "Rate limits not documented for gemini-exp-1114. Assuming same as gemini-1.5-pro.",
|
|
"supports_tool_choice": true
|
|
},
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0,
|
|
"output_cost_per_token_above_128k_tokens": 0,
|
|
"rpm": 1000,
|
|
"source": "https://ai.google.dev/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tpm": 4000000
|
|
},
|
|
"gemini/gemini-exp-1206": {
|
|
"input_cost_per_token": 0,
|
|
"input_cost_per_token_above_128k_tokens": 0,
|
|
"litellm_provider": "gemini",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 2097152,
|
|
"max_output_tokens": 8192,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 8192,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"metadata": {
|
|
"notes": "Rate limits not documented for gemini-exp-1206. Assuming same as gemini-1.5-pro.",
|
|
"supports_tool_choice": true
|
|
},
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0,
|
|
"output_cost_per_token_above_128k_tokens": 0,
|
|
"rpm": 1000,
|
|
"source": "https://ai.google.dev/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tpm": 4000000
|
|
},
|
|
"gemini/gemini-gemma-2-27b-it": {
|
|
"input_cost_per_token": 3.5e-07,
|
|
"litellm_provider": "gemini",
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.05e-06,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"gemini/gemini-gemma-2-9b-it": {
|
|
"input_cost_per_token": 3.5e-07,
|
|
"litellm_provider": "gemini",
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.05e-06,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"gemini/gemini-pro": {
|
|
"input_cost_per_token": 3.5e-07,
|
|
"input_cost_per_token_above_128k_tokens": 7e-07,
|
|
"litellm_provider": "gemini",
|
|
"max_input_tokens": 32760,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.05e-06,
|
|
"output_cost_per_token_above_128k_tokens": 2.1e-06,
|
|
"rpd": 30000,
|
|
"rpm": 360,
|
|
"source": "https://ai.google.dev/gemini-api/docs/models/gemini",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"tpm": 120000
|
|
},
|
|
"gemini/gemini-pro-vision": {
|
|
"input_cost_per_token": 3.5e-07,
|
|
"input_cost_per_token_above_128k_tokens": 7e-07,
|
|
"litellm_provider": "gemini",
|
|
"max_input_tokens": 30720,
|
|
"max_output_tokens": 2048,
|
|
"max_tokens": 2048,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.05e-06,
|
|
"output_cost_per_token_above_128k_tokens": 2.1e-06,
|
|
"rpd": 30000,
|
|
"rpm": 360,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tpm": 120000
|
|
},
|
|
"gemini/gemma-3-27b-it": {
|
|
"input_cost_per_audio_per_second": 0,
|
|
"input_cost_per_audio_per_second_above_128k_tokens": 0,
|
|
"input_cost_per_character": 0,
|
|
"input_cost_per_character_above_128k_tokens": 0,
|
|
"input_cost_per_image": 0,
|
|
"input_cost_per_image_above_128k_tokens": 0,
|
|
"input_cost_per_token": 0,
|
|
"input_cost_per_token_above_128k_tokens": 0,
|
|
"input_cost_per_video_per_second": 0,
|
|
"input_cost_per_video_per_second_above_128k_tokens": 0,
|
|
"litellm_provider": "gemini",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_character": 0,
|
|
"output_cost_per_character_above_128k_tokens": 0,
|
|
"output_cost_per_token": 0,
|
|
"output_cost_per_token_above_128k_tokens": 0,
|
|
"source": "https://aistudio.google.com",
|
|
"supports_audio_output": false,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": false,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"gemini/imagen-3.0-fast-generate-001": {
|
|
"litellm_provider": "gemini",
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.02,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing"
|
|
},
|
|
"gemini/imagen-3.0-generate-001": {
|
|
"litellm_provider": "gemini",
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.04,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing"
|
|
},
|
|
"gemini/imagen-3.0-generate-002": {
|
|
"litellm_provider": "gemini",
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.04,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing"
|
|
},
|
|
"gemini/imagen-4.0-fast-generate-001": {
|
|
"litellm_provider": "gemini",
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.02,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing"
|
|
},
|
|
"gemini/imagen-4.0-generate-001": {
|
|
"litellm_provider": "gemini",
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.04,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing"
|
|
},
|
|
"gemini/imagen-4.0-ultra-generate-001": {
|
|
"litellm_provider": "gemini",
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.06,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing"
|
|
},
|
|
"gemini/learnlm-1.5-pro-experimental": {
|
|
"input_cost_per_audio_per_second": 0,
|
|
"input_cost_per_audio_per_second_above_128k_tokens": 0,
|
|
"input_cost_per_character": 0,
|
|
"input_cost_per_character_above_128k_tokens": 0,
|
|
"input_cost_per_image": 0,
|
|
"input_cost_per_image_above_128k_tokens": 0,
|
|
"input_cost_per_token": 0,
|
|
"input_cost_per_token_above_128k_tokens": 0,
|
|
"input_cost_per_video_per_second": 0,
|
|
"input_cost_per_video_per_second_above_128k_tokens": 0,
|
|
"litellm_provider": "gemini",
|
|
"max_input_tokens": 32767,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_character": 0,
|
|
"output_cost_per_character_above_128k_tokens": 0,
|
|
"output_cost_per_token": 0,
|
|
"output_cost_per_token_above_128k_tokens": 0,
|
|
"source": "https://aistudio.google.com",
|
|
"supports_audio_output": false,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"gemini/veo-2.0-generate-001": {
|
|
"litellm_provider": "gemini",
|
|
"max_input_tokens": 1024,
|
|
"max_tokens": 1024,
|
|
"mode": "video_generation",
|
|
"output_cost_per_second": 0.35,
|
|
"source": "https://ai.google.dev/gemini-api/docs/video",
|
|
"supported_modalities": [
|
|
"text"
|
|
],
|
|
"supported_output_modalities": [
|
|
"video"
|
|
]
|
|
},
|
|
"gemini/veo-3.0-fast-generate-preview": {
|
|
"litellm_provider": "gemini",
|
|
"max_input_tokens": 1024,
|
|
"max_tokens": 1024,
|
|
"mode": "video_generation",
|
|
"output_cost_per_second": 0.4,
|
|
"source": "https://ai.google.dev/gemini-api/docs/video",
|
|
"supported_modalities": [
|
|
"text"
|
|
],
|
|
"supported_output_modalities": [
|
|
"video"
|
|
]
|
|
},
|
|
"gemini/veo-3.0-generate-preview": {
|
|
"litellm_provider": "gemini",
|
|
"max_input_tokens": 1024,
|
|
"max_tokens": 1024,
|
|
"mode": "video_generation",
|
|
"output_cost_per_second": 0.75,
|
|
"source": "https://ai.google.dev/gemini-api/docs/video",
|
|
"supported_modalities": [
|
|
"text"
|
|
],
|
|
"supported_output_modalities": [
|
|
"video"
|
|
]
|
|
},
|
|
"gemini/veo-3.1-fast-generate-preview": {
|
|
"litellm_provider": "gemini",
|
|
"max_input_tokens": 1024,
|
|
"max_tokens": 1024,
|
|
"mode": "video_generation",
|
|
"output_cost_per_second": 0.15,
|
|
"source": "https://ai.google.dev/gemini-api/docs/video",
|
|
"supported_modalities": [
|
|
"text"
|
|
],
|
|
"supported_output_modalities": [
|
|
"video"
|
|
]
|
|
},
|
|
"gemini/veo-3.1-generate-preview": {
|
|
"litellm_provider": "gemini",
|
|
"max_input_tokens": 1024,
|
|
"max_tokens": 1024,
|
|
"mode": "video_generation",
|
|
"output_cost_per_second": 0.40,
|
|
"source": "https://ai.google.dev/gemini-api/docs/video",
|
|
"supported_modalities": [
|
|
"text"
|
|
],
|
|
"supported_output_modalities": [
|
|
"video"
|
|
]
|
|
},
|
|
"google_pse/search": {
|
|
"input_cost_per_query": 0.005,
|
|
"litellm_provider": "google_pse",
|
|
"mode": "search"
|
|
},
|
|
"global.anthropic.claude-sonnet-4-5-20250929-v1:0": {
|
|
"cache_creation_input_token_cost": 3.75e-06,
|
|
"cache_read_input_token_cost": 3e-07,
|
|
"input_cost_per_token": 3e-06,
|
|
"input_cost_per_token_above_200k_tokens": 6e-06,
|
|
"output_cost_per_token_above_200k_tokens": 2.25e-05,
|
|
"cache_creation_input_token_cost_above_200k_tokens": 7.5e-06,
|
|
"cache_read_input_token_cost_above_200k_tokens": 6e-07,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 64000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.01,
|
|
"search_context_size_low": 0.01,
|
|
"search_context_size_medium": 0.01
|
|
},
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 346
|
|
},
|
|
"global.anthropic.claude-sonnet-4-20250514-v1:0": {
|
|
"cache_creation_input_token_cost": 3.75e-06,
|
|
"cache_read_input_token_cost": 3e-07,
|
|
"input_cost_per_token": 3e-06,
|
|
"input_cost_per_token_above_200k_tokens": 6e-06,
|
|
"output_cost_per_token_above_200k_tokens": 2.25e-05,
|
|
"cache_creation_input_token_cost_above_200k_tokens": 7.5e-06,
|
|
"cache_read_input_token_cost_above_200k_tokens": 6e-07,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 1000000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 64000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.01,
|
|
"search_context_size_low": 0.01,
|
|
"search_context_size_medium": 0.01
|
|
},
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 159
|
|
},
|
|
"global.anthropic.claude-haiku-4-5-20251001-v1:0": {
|
|
"cache_creation_input_token_cost": 1.375e-06,
|
|
"cache_read_input_token_cost": 1.1e-07,
|
|
"input_cost_per_token": 1.1e-06,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 64000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5.5e-06,
|
|
"source": "https://aws.amazon.com/about-aws/whats-new/2025/10/claude-4-5-haiku-anthropic-amazon-bedrock",
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 346
|
|
},
|
|
"gpt-3.5-turbo": {
|
|
"input_cost_per_token": 0.5e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 16385,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4097,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-06,
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"gpt-3.5-turbo-0125": {
|
|
"input_cost_per_token": 5e-07,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 16385,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 16385,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-06,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"gpt-3.5-turbo-0301": {
|
|
"input_cost_per_token": 1.5e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 4097,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4097,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-06,
|
|
"supports_prompt_caching": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"gpt-3.5-turbo-0613": {
|
|
"input_cost_per_token": 1.5e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 4097,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4097,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-06,
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"gpt-3.5-turbo-1106": {
|
|
"deprecation_date": "2026-09-28",
|
|
"input_cost_per_token": 1e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 16385,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 16385,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-06,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"gpt-3.5-turbo-16k": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 16385,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 16385,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-06,
|
|
"supports_prompt_caching": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"gpt-3.5-turbo-16k-0613": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 16385,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 16385,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-06,
|
|
"supports_prompt_caching": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"gpt-3.5-turbo-instruct": {
|
|
"input_cost_per_token": 1.5e-06,
|
|
"litellm_provider": "text-completion-openai",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "completion",
|
|
"output_cost_per_token": 2e-06
|
|
},
|
|
"gpt-3.5-turbo-instruct-0914": {
|
|
"input_cost_per_token": 1.5e-06,
|
|
"litellm_provider": "text-completion-openai",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 4097,
|
|
"max_tokens": 4097,
|
|
"mode": "completion",
|
|
"output_cost_per_token": 2e-06
|
|
},
|
|
"gpt-4": {
|
|
"input_cost_per_token": 3e-05,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-05,
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"gpt-4-0125-preview": {
|
|
"deprecation_date": "2026-03-26",
|
|
"input_cost_per_token": 1e-05,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"gpt-4-0314": {
|
|
"input_cost_per_token": 3e-05,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-05,
|
|
"supports_prompt_caching": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"gpt-4-0613": {
|
|
"deprecation_date": "2025-06-06",
|
|
"input_cost_per_token": 3e-05,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-05,
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"gpt-4-1106-preview": {
|
|
"deprecation_date": "2026-03-26",
|
|
"input_cost_per_token": 1e-05,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"gpt-4-1106-vision-preview": {
|
|
"deprecation_date": "2024-12-06",
|
|
"input_cost_per_token": 1e-05,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-05,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"gpt-4-32k": {
|
|
"input_cost_per_token": 6e-05,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.00012,
|
|
"supports_prompt_caching": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"gpt-4-32k-0314": {
|
|
"input_cost_per_token": 6e-05,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.00012,
|
|
"supports_prompt_caching": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"gpt-4-32k-0613": {
|
|
"input_cost_per_token": 6e-05,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.00012,
|
|
"supports_prompt_caching": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"gpt-4-turbo": {
|
|
"input_cost_per_token": 1e-05,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"gpt-4-turbo-2024-04-09": {
|
|
"input_cost_per_token": 1e-05,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"gpt-4-turbo-preview": {
|
|
"input_cost_per_token": 1e-05,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"gpt-4-vision-preview": {
|
|
"deprecation_date": "2024-12-06",
|
|
"input_cost_per_token": 1e-05,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-05,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"gpt-4.1": {
|
|
"cache_read_input_token_cost": 5e-07,
|
|
"cache_read_input_token_cost_priority": 8.75e-07,
|
|
"input_cost_per_token": 2e-06,
|
|
"input_cost_per_token_batches": 1e-06,
|
|
"input_cost_per_token_priority": 3.5e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 1047576,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8e-06,
|
|
"output_cost_per_token_batches": 4e-06,
|
|
"output_cost_per_token_priority": 1.4e-05,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_service_tier": true,
|
|
"supports_vision": true
|
|
},
|
|
"gpt-4.1-2025-04-14": {
|
|
"cache_read_input_token_cost": 5e-07,
|
|
"input_cost_per_token": 2e-06,
|
|
"input_cost_per_token_batches": 1e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 1047576,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8e-06,
|
|
"output_cost_per_token_batches": 4e-06,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_service_tier": true,
|
|
"supports_vision": true
|
|
},
|
|
"gpt-4.1-mini": {
|
|
"cache_read_input_token_cost": 1e-07,
|
|
"cache_read_input_token_cost_priority": 1.75e-07,
|
|
"input_cost_per_token": 4e-07,
|
|
"input_cost_per_token_batches": 2e-07,
|
|
"input_cost_per_token_priority": 7e-07,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 1047576,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.6e-06,
|
|
"output_cost_per_token_batches": 8e-07,
|
|
"output_cost_per_token_priority": 2.8e-06,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_service_tier": true,
|
|
"supports_vision": true
|
|
},
|
|
"gpt-4.1-mini-2025-04-14": {
|
|
"cache_read_input_token_cost": 1e-07,
|
|
"input_cost_per_token": 4e-07,
|
|
"input_cost_per_token_batches": 2e-07,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 1047576,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.6e-06,
|
|
"output_cost_per_token_batches": 8e-07,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_service_tier": true,
|
|
"supports_vision": true
|
|
},
|
|
"gpt-4.1-nano": {
|
|
"cache_read_input_token_cost": 2.5e-08,
|
|
"cache_read_input_token_cost_priority": 5e-08,
|
|
"input_cost_per_token": 1e-07,
|
|
"input_cost_per_token_batches": 5e-08,
|
|
"input_cost_per_token_priority": 2e-07,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 1047576,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-07,
|
|
"output_cost_per_token_batches": 2e-07,
|
|
"output_cost_per_token_priority": 8e-07,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_service_tier": true,
|
|
"supports_vision": true
|
|
},
|
|
"gpt-4.1-nano-2025-04-14": {
|
|
"cache_read_input_token_cost": 2.5e-08,
|
|
"input_cost_per_token": 1e-07,
|
|
"input_cost_per_token_batches": 5e-08,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 1047576,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-07,
|
|
"output_cost_per_token_batches": 2e-07,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_service_tier": true,
|
|
"supports_vision": true
|
|
},
|
|
"gpt-4.5-preview": {
|
|
"cache_read_input_token_cost": 3.75e-05,
|
|
"input_cost_per_token": 7.5e-05,
|
|
"input_cost_per_token_batches": 3.75e-05,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.00015,
|
|
"output_cost_per_token_batches": 7.5e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"gpt-4.5-preview-2025-02-27": {
|
|
"cache_read_input_token_cost": 3.75e-05,
|
|
"deprecation_date": "2025-07-14",
|
|
"input_cost_per_token": 7.5e-05,
|
|
"input_cost_per_token_batches": 3.75e-05,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.00015,
|
|
"output_cost_per_token_batches": 7.5e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"gpt-4o": {
|
|
"cache_read_input_token_cost": 1.25e-06,
|
|
"cache_read_input_token_cost_priority": 2.125e-06,
|
|
"input_cost_per_token": 2.5e-06,
|
|
"input_cost_per_token_batches": 1.25e-06,
|
|
"input_cost_per_token_priority": 4.25e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"output_cost_per_token_batches": 5e-06,
|
|
"output_cost_per_token_priority": 1.7e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_service_tier": true,
|
|
"supports_vision": true
|
|
},
|
|
"gpt-4o-2024-05-13": {
|
|
"input_cost_per_token": 5e-06,
|
|
"input_cost_per_token_batches": 2.5e-06,
|
|
"input_cost_per_token_priority": 8.75e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"output_cost_per_token_batches": 7.5e-06,
|
|
"output_cost_per_token_priority": 2.625e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"gpt-4o-2024-08-06": {
|
|
"cache_read_input_token_cost": 1.25e-06,
|
|
"input_cost_per_token": 2.5e-06,
|
|
"input_cost_per_token_batches": 1.25e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"output_cost_per_token_batches": 5e-06,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_service_tier": true,
|
|
"supports_vision": true
|
|
},
|
|
"gpt-4o-2024-11-20": {
|
|
"cache_read_input_token_cost": 1.25e-06,
|
|
"input_cost_per_token": 2.5e-06,
|
|
"input_cost_per_token_batches": 1.25e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"output_cost_per_token_batches": 5e-06,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_service_tier": true,
|
|
"supports_vision": true
|
|
},
|
|
"gpt-4o-audio-preview": {
|
|
"input_cost_per_audio_token": 0.0001,
|
|
"input_cost_per_token": 2.5e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_audio_token": 0.0002,
|
|
"output_cost_per_token": 1e-05,
|
|
"supports_audio_input": true,
|
|
"supports_audio_output": true,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"gpt-4o-audio-preview-2024-10-01": {
|
|
"input_cost_per_audio_token": 0.0001,
|
|
"input_cost_per_token": 2.5e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_audio_token": 0.0002,
|
|
"output_cost_per_token": 1e-05,
|
|
"supports_audio_input": true,
|
|
"supports_audio_output": true,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"gpt-4o-audio-preview-2024-12-17": {
|
|
"input_cost_per_audio_token": 4e-05,
|
|
"input_cost_per_token": 2.5e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_audio_token": 8e-05,
|
|
"output_cost_per_token": 1e-05,
|
|
"supports_audio_input": true,
|
|
"supports_audio_output": true,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"gpt-4o-audio-preview-2025-06-03": {
|
|
"input_cost_per_audio_token": 4e-05,
|
|
"input_cost_per_token": 2.5e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_audio_token": 8e-05,
|
|
"output_cost_per_token": 1e-05,
|
|
"supports_audio_input": true,
|
|
"supports_audio_output": true,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"gpt-4o-mini": {
|
|
"cache_read_input_token_cost": 7.5e-08,
|
|
"cache_read_input_token_cost_priority": 1.25e-07,
|
|
"input_cost_per_token": 1.5e-07,
|
|
"input_cost_per_token_batches": 7.5e-08,
|
|
"input_cost_per_token_priority": 2.5e-07,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-07,
|
|
"output_cost_per_token_batches": 3e-07,
|
|
"output_cost_per_token_priority": 1e-06,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_service_tier": true,
|
|
"supports_vision": true
|
|
},
|
|
"gpt-4o-mini-2024-07-18": {
|
|
"cache_read_input_token_cost": 7.5e-08,
|
|
"input_cost_per_token": 1.5e-07,
|
|
"input_cost_per_token_batches": 7.5e-08,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-07,
|
|
"output_cost_per_token_batches": 3e-07,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.03,
|
|
"search_context_size_low": 0.025,
|
|
"search_context_size_medium": 0.0275
|
|
},
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_service_tier": true,
|
|
"supports_vision": true
|
|
},
|
|
"gpt-4o-mini-audio-preview": {
|
|
"input_cost_per_audio_token": 1e-05,
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_audio_token": 2e-05,
|
|
"output_cost_per_token": 6e-07,
|
|
"supports_audio_input": true,
|
|
"supports_audio_output": true,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"gpt-4o-mini-audio-preview-2024-12-17": {
|
|
"input_cost_per_audio_token": 1e-05,
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_audio_token": 2e-05,
|
|
"output_cost_per_token": 6e-07,
|
|
"supports_audio_input": true,
|
|
"supports_audio_output": true,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"gpt-4o-mini-realtime-preview": {
|
|
"cache_creation_input_audio_token_cost": 3e-07,
|
|
"cache_read_input_token_cost": 3e-07,
|
|
"input_cost_per_audio_token": 1e-05,
|
|
"input_cost_per_token": 6e-07,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_audio_token": 2e-05,
|
|
"output_cost_per_token": 2.4e-06,
|
|
"supports_audio_input": true,
|
|
"supports_audio_output": true,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"gpt-4o-mini-realtime-preview-2024-12-17": {
|
|
"cache_creation_input_audio_token_cost": 3e-07,
|
|
"cache_read_input_token_cost": 3e-07,
|
|
"input_cost_per_audio_token": 1e-05,
|
|
"input_cost_per_token": 6e-07,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_audio_token": 2e-05,
|
|
"output_cost_per_token": 2.4e-06,
|
|
"supports_audio_input": true,
|
|
"supports_audio_output": true,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"gpt-4o-mini-search-preview": {
|
|
"cache_read_input_token_cost": 7.5e-08,
|
|
"input_cost_per_token": 1.5e-07,
|
|
"input_cost_per_token_batches": 7.5e-08,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-07,
|
|
"output_cost_per_token_batches": 3e-07,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.03,
|
|
"search_context_size_low": 0.025,
|
|
"search_context_size_medium": 0.0275
|
|
},
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true
|
|
},
|
|
"gpt-4o-mini-search-preview-2025-03-11": {
|
|
"cache_read_input_token_cost": 7.5e-08,
|
|
"input_cost_per_token": 1.5e-07,
|
|
"input_cost_per_token_batches": 7.5e-08,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-07,
|
|
"output_cost_per_token_batches": 3e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"gpt-4o-mini-transcribe": {
|
|
"input_cost_per_audio_token": 3e-06,
|
|
"input_cost_per_token": 1.25e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 16000,
|
|
"max_output_tokens": 2000,
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_token": 5e-06,
|
|
"supported_endpoints": [
|
|
"/v1/audio/transcriptions"
|
|
]
|
|
},
"gpt-4o-mini-tts": {
"input_cost_per_token": 2.5e-06,
"litellm_provider": "openai",
"mode": "audio_speech",
"output_cost_per_audio_token": 1.2e-05,
"output_cost_per_second": 0.00025,
"output_cost_per_token": 1e-05,
"supported_endpoints": [
"/v1/audio/speech"
],
"supported_modalities": [
"text",
"audio"
],
"supported_output_modalities": [
"audio"
]
},
"gpt-4o-realtime-preview": {
"cache_read_input_token_cost": 2.5e-06,
"input_cost_per_audio_token": 4e-05,
"input_cost_per_token": 5e-06,
"litellm_provider": "openai",
"max_input_tokens": 128000,
"max_output_tokens": 4096,
"max_tokens": 4096,
"mode": "chat",
"output_cost_per_audio_token": 8e-05,
"output_cost_per_token": 2e-05,
"supports_audio_input": true,
"supports_audio_output": true,
"supports_function_calling": true,
"supports_parallel_function_calling": true,
"supports_system_messages": true,
"supports_tool_choice": true
},
"gpt-4o-realtime-preview-2024-10-01": {
"cache_creation_input_audio_token_cost": 2e-05,
"cache_read_input_token_cost": 2.5e-06,
"input_cost_per_audio_token": 0.0001,
"input_cost_per_token": 5e-06,
"litellm_provider": "openai",
"max_input_tokens": 128000,
"max_output_tokens": 4096,
"max_tokens": 4096,
"mode": "chat",
"output_cost_per_audio_token": 0.0002,
"output_cost_per_token": 2e-05,
"supports_audio_input": true,
"supports_audio_output": true,
"supports_function_calling": true,
"supports_parallel_function_calling": true,
"supports_system_messages": true,
"supports_tool_choice": true
},
"gpt-4o-realtime-preview-2024-12-17": {
"cache_read_input_token_cost": 2.5e-06,
"input_cost_per_audio_token": 4e-05,
"input_cost_per_token": 5e-06,
"litellm_provider": "openai",
"max_input_tokens": 128000,
"max_output_tokens": 4096,
"max_tokens": 4096,
"mode": "chat",
"output_cost_per_audio_token": 8e-05,
"output_cost_per_token": 2e-05,
"supports_audio_input": true,
"supports_audio_output": true,
"supports_function_calling": true,
"supports_parallel_function_calling": true,
"supports_system_messages": true,
"supports_tool_choice": true
},
"gpt-4o-realtime-preview-2025-06-03": {
"cache_read_input_token_cost": 2.5e-06,
"input_cost_per_audio_token": 4e-05,
"input_cost_per_token": 5e-06,
"litellm_provider": "openai",
"max_input_tokens": 128000,
"max_output_tokens": 4096,
"max_tokens": 4096,
"mode": "chat",
"output_cost_per_audio_token": 8e-05,
"output_cost_per_token": 2e-05,
"supports_audio_input": true,
"supports_audio_output": true,
"supports_function_calling": true,
"supports_parallel_function_calling": true,
"supports_system_messages": true,
"supports_tool_choice": true
},
"gpt-4o-search-preview": {
"cache_read_input_token_cost": 1.25e-06,
"input_cost_per_token": 2.5e-06,
"input_cost_per_token_batches": 1.25e-06,
"litellm_provider": "openai",
"max_input_tokens": 128000,
"max_output_tokens": 16384,
"max_tokens": 16384,
"mode": "chat",
"output_cost_per_token": 1e-05,
"output_cost_per_token_batches": 5e-06,
"search_context_cost_per_query": {
"search_context_size_high": 0.05,
"search_context_size_low": 0.03,
"search_context_size_medium": 0.035
},
"supports_function_calling": true,
"supports_parallel_function_calling": true,
"supports_pdf_input": true,
"supports_prompt_caching": true,
"supports_response_schema": true,
"supports_system_messages": true,
"supports_tool_choice": true,
"supports_vision": true,
"supports_web_search": true
},
"gpt-4o-search-preview-2025-03-11": {
"cache_read_input_token_cost": 1.25e-06,
"input_cost_per_token": 2.5e-06,
"input_cost_per_token_batches": 1.25e-06,
"litellm_provider": "openai",
"max_input_tokens": 128000,
"max_output_tokens": 16384,
"max_tokens": 16384,
"mode": "chat",
"output_cost_per_token": 1e-05,
"output_cost_per_token_batches": 5e-06,
"supports_function_calling": true,
"supports_parallel_function_calling": true,
"supports_pdf_input": true,
"supports_prompt_caching": true,
"supports_response_schema": true,
"supports_system_messages": true,
"supports_tool_choice": true,
"supports_vision": true
},
"gpt-4o-transcribe": {
"input_cost_per_audio_token": 6e-06,
"input_cost_per_token": 2.5e-06,
"litellm_provider": "openai",
"max_input_tokens": 16000,
"max_output_tokens": 2000,
"mode": "audio_transcription",
"output_cost_per_token": 1e-05,
"supported_endpoints": [
"/v1/audio/transcriptions"
]
},
"gpt-5": {
"cache_read_input_token_cost": 1.25e-07,
"cache_read_input_token_cost_flex": 6.25e-08,
"cache_read_input_token_cost_priority": 2.5e-07,
"input_cost_per_token": 1.25e-06,
"input_cost_per_token_flex": 6.25e-07,
"input_cost_per_token_priority": 2.5e-06,
"litellm_provider": "openai",
"max_input_tokens": 272000,
"max_output_tokens": 128000,
"max_tokens": 128000,
"mode": "chat",
"output_cost_per_token": 1e-05,
"output_cost_per_token_flex": 5e-06,
"output_cost_per_token_priority": 2e-05,
"supported_endpoints": [
"/v1/chat/completions",
"/v1/batch",
"/v1/responses"
],
"supported_modalities": [
"text",
"image"
],
"supported_output_modalities": [
"text"
],
"supports_function_calling": true,
"supports_native_streaming": true,
"supports_parallel_function_calling": true,
"supports_pdf_input": true,
"supports_prompt_caching": true,
"supports_reasoning": true,
"supports_response_schema": true,
"supports_system_messages": true,
"supports_tool_choice": true,
"supports_service_tier": true,
"supports_vision": true
},
"gpt-5.1": {
|
|
"cache_read_input_token_cost": 1.25e-07,
|
|
"cache_read_input_token_cost_priority": 2.5e-07,
|
|
"input_cost_per_token": 1.25e-06,
|
|
"input_cost_per_token_priority": 2.5e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"output_cost_per_token_priority": 2e-05,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_service_tier": true,
|
|
"supports_vision": true
|
|
},
|
|
"gpt-5.1-2025-11-13": {
|
|
"cache_read_input_token_cost": 1.25e-07,
|
|
"cache_read_input_token_cost_priority": 2.5e-07,
|
|
"input_cost_per_token": 1.25e-06,
|
|
"input_cost_per_token_priority": 2.5e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"output_cost_per_token_priority": 2e-05,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_service_tier": true,
|
|
"supports_vision": true
|
|
},
|
|
"gpt-5.1-chat-latest": {
|
|
"cache_read_input_token_cost": 1.25e-07,
|
|
"cache_read_input_token_cost_priority": 2.5e-07,
|
|
"input_cost_per_token": 1.25e-06,
|
|
"input_cost_per_token_priority": 2.5e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"output_cost_per_token_priority": 2e-05,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supports_function_calling": false,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": false,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": false,
|
|
"supports_vision": true
|
|
},
|
|
"gpt-5-pro": {
|
|
"input_cost_per_token": 1.5e-05,
|
|
"input_cost_per_token_batches": 7.5e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 400000,
|
|
"max_output_tokens": 272000,
|
|
"max_tokens": 272000,
|
|
"mode": "responses",
"output_cost_per_token": 0.00012,
|
|
"output_cost_per_token_batches": 6e-05,
|
|
"supported_endpoints": [
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": false,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true
|
|
},
|
|
"gpt-5-pro-2025-10-06": {
|
|
"input_cost_per_token": 1.5e-05,
|
|
"input_cost_per_token_batches": 7.5e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 400000,
|
|
"max_output_tokens": 272000,
|
|
"max_tokens": 272000,
|
|
"mode": "responses",
"output_cost_per_token": 0.00012,
|
|
"output_cost_per_token_batches": 6e-05,
|
|
"supported_endpoints": [
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": false,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true
|
|
},
|
|
"gpt-5-2025-08-07": {
|
|
"cache_read_input_token_cost": 1.25e-07,
|
|
"cache_read_input_token_cost_flex": 6.25e-08,
|
|
"cache_read_input_token_cost_priority": 2.5e-07,
|
|
"input_cost_per_token": 1.25e-06,
|
|
"input_cost_per_token_flex": 6.25e-07,
|
|
"input_cost_per_token_priority": 2.5e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"output_cost_per_token_flex": 5e-06,
|
|
"output_cost_per_token_priority": 2e-05,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_service_tier": true,
|
|
"supports_vision": true
|
|
},
|
|
"gpt-5-chat": {
|
|
"cache_read_input_token_cost": 1.25e-07,
|
|
"input_cost_per_token": 1.25e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": false,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": false,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": false,
|
|
"supports_vision": true
|
|
},
|
|
"gpt-5-chat-latest": {
|
|
"cache_read_input_token_cost": 1.25e-07,
|
|
"input_cost_per_token": 1.25e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": false,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": false,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": false,
|
|
"supports_vision": true
|
|
},
|
|
"gpt-5-codex": {
|
|
"cache_read_input_token_cost": 1.25e-07,
|
|
"input_cost_per_token": 1.25e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "responses",
|
|
"output_cost_per_token": 1e-05,
|
|
"supported_endpoints": [
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": false,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"gpt-5.1-codex": {
|
|
"cache_read_input_token_cost": 1.25e-07,
|
|
"cache_read_input_token_cost_priority": 2.5e-07,
|
|
"input_cost_per_token": 1.25e-06,
|
|
"input_cost_per_token_priority": 2.5e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "responses",
|
|
"output_cost_per_token": 1e-05,
|
|
"output_cost_per_token_priority": 2e-05,
|
|
"supported_endpoints": [
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": false,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"gpt-5.1-codex-max": {
|
|
"cache_read_input_token_cost": 1.25e-07,
|
|
"input_cost_per_token": 1.25e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 400000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "responses",
|
|
"output_cost_per_token": 1e-05,
|
|
"supported_endpoints": [
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": false,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"gpt-5.1-codex-mini": {
|
|
"cache_read_input_token_cost": 2.5e-08,
|
|
"cache_read_input_token_cost_priority": 4.5e-08,
|
|
"input_cost_per_token": 2.5e-07,
|
|
"input_cost_per_token_priority": 4.5e-07,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "responses",
|
|
"output_cost_per_token": 2e-06,
|
|
"output_cost_per_token_priority": 3.6e-06,
|
|
"supported_endpoints": [
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": false,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"gpt-5-mini": {
|
|
"cache_read_input_token_cost": 2.5e-08,
|
|
"cache_read_input_token_cost_flex": 1.25e-08,
|
|
"cache_read_input_token_cost_priority": 4.5e-08,
|
|
"input_cost_per_token": 2.5e-07,
|
|
"input_cost_per_token_flex": 1.25e-07,
|
|
"input_cost_per_token_priority": 4.5e-07,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-06,
|
|
"output_cost_per_token_flex": 1e-06,
|
|
"output_cost_per_token_priority": 3.6e-06,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_service_tier": true,
|
|
"supports_vision": true
|
|
},
|
|
"gpt-5-mini-2025-08-07": {
|
|
"cache_read_input_token_cost": 2.5e-08,
|
|
"cache_read_input_token_cost_flex": 1.25e-08,
|
|
"cache_read_input_token_cost_priority": 4.5e-08,
|
|
"input_cost_per_token": 2.5e-07,
|
|
"input_cost_per_token_flex": 1.25e-07,
|
|
"input_cost_per_token_priority": 4.5e-07,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-06,
|
|
"output_cost_per_token_flex": 1e-06,
|
|
"output_cost_per_token_priority": 3.6e-06,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_service_tier": true,
|
|
"supports_vision": true
|
|
},
|
|
"gpt-5-nano": {
|
|
"cache_read_input_token_cost": 5e-09,
|
|
"cache_read_input_token_cost_flex": 2.5e-09,
|
|
"input_cost_per_token": 5e-08,
|
|
"input_cost_per_token_flex": 2.5e-08,
|
|
"input_cost_per_token_priority": 2.5e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-07,
|
|
"output_cost_per_token_flex": 2e-07,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"gpt-5-nano-2025-08-07": {
|
|
"cache_read_input_token_cost": 5e-09,
|
|
"cache_read_input_token_cost_flex": 2.5e-09,
|
|
"input_cost_per_token": 5e-08,
|
|
"input_cost_per_token_flex": 2.5e-08,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-07,
|
|
"output_cost_per_token_flex": 2e-07,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"gpt-image-1": {
|
|
"input_cost_per_pixel": 4.0054321e-08,
|
|
"litellm_provider": "openai",
|
|
"mode": "image_generation",
|
|
"output_cost_per_pixel": 0.0,
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"gpt-image-1-mini": {
|
|
"cache_read_input_image_token_cost": 2.5e-07,
|
|
"cache_read_input_token_cost": 2e-07,
|
|
"input_cost_per_image_token": 2.5e-06,
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "openai",
|
|
"mode": "chat",
|
|
"output_cost_per_image_token": 8e-06,
|
|
"supported_endpoints": [
|
|
"/v1/images/generations",
|
|
"/v1/images/edits"
|
|
]
|
|
},
|
|
"gpt-realtime": {
|
|
"cache_creation_input_audio_token_cost": 4e-07,
|
|
"cache_read_input_token_cost": 4e-07,
|
|
"input_cost_per_audio_token": 3.2e-05,
|
|
"input_cost_per_image": 5e-06,
|
|
"input_cost_per_token": 4e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_audio_token": 6.4e-05,
|
|
"output_cost_per_token": 1.6e-05,
|
|
"supported_endpoints": [
|
|
"/v1/realtime"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"audio"
|
|
],
|
|
"supports_audio_input": true,
|
|
"supports_audio_output": true,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"gpt-realtime-mini": {
|
|
"cache_creation_input_audio_token_cost": 3e-07,
|
|
"cache_read_input_audio_token_cost": 3e-07,
|
|
"input_cost_per_audio_token": 1e-05,
|
|
"input_cost_per_token": 6e-07,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_audio_token": 2e-05,
|
|
"output_cost_per_token": 2.4e-06,
|
|
"supported_endpoints": [
|
|
"/v1/realtime"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"audio"
|
|
],
|
|
"supports_audio_input": true,
|
|
"supports_audio_output": true,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"gpt-realtime-2025-08-28": {
|
|
"cache_creation_input_audio_token_cost": 4e-07,
|
|
"cache_read_input_token_cost": 4e-07,
|
|
"input_cost_per_audio_token": 3.2e-05,
|
|
"input_cost_per_image": 5e-06,
|
|
"input_cost_per_token": 4e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_audio_token": 6.4e-05,
|
|
"output_cost_per_token": 1.6e-05,
|
|
"supported_endpoints": [
|
|
"/v1/realtime"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"audio"
|
|
],
|
|
"supports_audio_input": true,
|
|
"supports_audio_output": true,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"gradient_ai/alibaba-qwen3-32b": {
|
|
"litellm_provider": "gradient_ai",
|
|
"max_tokens": 2048,
|
|
"mode": "chat",
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions"
|
|
],
|
|
"supported_modalities": [
|
|
"text"
|
|
],
|
|
"supports_tool_choice": false
|
|
},
|
|
"gradient_ai/anthropic-claude-3-opus": {
|
|
"input_cost_per_token": 1.5e-05,
|
|
"litellm_provider": "gradient_ai",
|
|
"max_tokens": 1024,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.5e-05,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions"
|
|
],
|
|
"supported_modalities": [
|
|
"text"
|
|
],
|
|
"supports_tool_choice": false
|
|
},
|
|
"gradient_ai/anthropic-claude-3.5-haiku": {
|
|
"input_cost_per_token": 8e-07,
|
|
"litellm_provider": "gradient_ai",
|
|
"max_tokens": 1024,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-06,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions"
|
|
],
|
|
"supported_modalities": [
|
|
"text"
|
|
],
|
|
"supports_tool_choice": false
|
|
},
|
|
"gradient_ai/anthropic-claude-3.5-sonnet": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "gradient_ai",
|
|
"max_tokens": 1024,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions"
|
|
],
|
|
"supported_modalities": [
|
|
"text"
|
|
],
|
|
"supports_tool_choice": false
|
|
},
|
|
"gradient_ai/anthropic-claude-3.7-sonnet": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "gradient_ai",
|
|
"max_tokens": 1024,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions"
|
|
],
|
|
"supported_modalities": [
|
|
"text"
|
|
],
|
|
"supports_tool_choice": false
|
|
},
|
|
"gradient_ai/deepseek-r1-distill-llama-70b": {
|
|
"input_cost_per_token": 9.9e-07,
|
|
"litellm_provider": "gradient_ai",
|
|
"max_tokens": 8000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 9.9e-07,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions"
|
|
],
|
|
"supported_modalities": [
|
|
"text"
|
|
],
|
|
"supports_tool_choice": false
|
|
},
|
|
"gradient_ai/llama3-8b-instruct": {
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "gradient_ai",
|
|
"max_tokens": 512,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-07,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions"
|
|
],
|
|
"supported_modalities": [
|
|
"text"
|
|
],
|
|
"supports_tool_choice": false
|
|
},
|
|
"gradient_ai/llama3.3-70b-instruct": {
|
|
"input_cost_per_token": 6.5e-07,
|
|
"litellm_provider": "gradient_ai",
|
|
"max_tokens": 2048,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6.5e-07,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions"
|
|
],
|
|
"supported_modalities": [
|
|
"text"
|
|
],
|
|
"supports_tool_choice": false
|
|
},
|
|
"gradient_ai/mistral-nemo-instruct-2407": {
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "gradient_ai",
|
|
"max_tokens": 512,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-07,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions"
|
|
],
|
|
"supported_modalities": [
|
|
"text"
|
|
],
|
|
"supports_tool_choice": false
|
|
},
|
|
"gradient_ai/openai-gpt-4o": {
|
|
"litellm_provider": "gradient_ai",
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions"
|
|
],
|
|
"supported_modalities": [
|
|
"text"
|
|
],
|
|
"supports_tool_choice": false
|
|
},
|
|
"gradient_ai/openai-gpt-4o-mini": {
|
|
"litellm_provider": "gradient_ai",
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions"
|
|
],
|
|
"supported_modalities": [
|
|
"text"
|
|
],
|
|
"supports_tool_choice": false
|
|
},
|
|
"gradient_ai/openai-o3": {
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "gradient_ai",
|
|
"max_tokens": 100000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8e-06,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions"
|
|
],
|
|
"supported_modalities": [
|
|
"text"
|
|
],
|
|
"supports_tool_choice": false
|
|
},
|
|
"gradient_ai/openai-o3-mini": {
|
|
"input_cost_per_token": 1.1e-06,
|
|
"litellm_provider": "gradient_ai",
|
|
"max_tokens": 100000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4.4e-06,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions"
|
|
],
|
|
"supported_modalities": [
|
|
"text"
|
|
],
|
|
"supports_tool_choice": false
|
|
},
|
|
"lemonade/Qwen3-Coder-30B-A3B-Instruct-GGUF": {
|
|
"input_cost_per_token": 0,
|
|
"litellm_provider": "lemonade",
|
|
"max_tokens": 262144,
|
|
"max_input_tokens": 262144,
|
|
"max_output_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"lemonade/gpt-oss-20b-mxfp4-GGUF": {
|
|
"input_cost_per_token": 0,
|
|
"litellm_provider": "lemonade",
|
|
"max_tokens": 131072,
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"lemonade/gpt-oss-120b-mxfp-GGUF": {
|
|
"input_cost_per_token": 0,
|
|
"litellm_provider": "lemonade",
|
|
"max_tokens": 131072,
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"lemonade/Gemma-3-4b-it-GGUF": {
|
|
"input_cost_per_token": 0,
|
|
"litellm_provider": "lemonade",
|
|
"max_tokens": 128000,
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"lemonade/Qwen3-4B-Instruct-2507-GGUF": {
|
|
"input_cost_per_token": 0,
|
|
"litellm_provider": "lemonade",
|
|
"max_tokens": 262144,
|
|
"max_input_tokens": 262144,
|
|
"max_output_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"groq/deepseek-r1-distill-llama-70b": {
|
|
"input_cost_per_token": 7.5e-07,
|
|
"litellm_provider": "groq",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 9.9e-07,
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": false,
|
|
"supports_tool_choice": true
|
|
},
|
|
"groq/distil-whisper-large-v3-en": {
|
|
"input_cost_per_second": 5.56e-06,
|
|
"litellm_provider": "groq",
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_second": 0.0
|
|
},
|
|
"groq/gemma-7b-it": {
|
|
"deprecation_date": "2024-12-18",
|
|
"input_cost_per_token": 7e-08,
|
|
"litellm_provider": "groq",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7e-08,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": false,
|
|
"supports_tool_choice": true
|
|
},
|
|
"groq/gemma2-9b-it": {
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "groq",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-07,
|
|
"supports_function_calling": false,
|
|
"supports_response_schema": false,
|
|
"supports_tool_choice": false
|
|
},
|
|
"groq/llama-3.1-405b-reasoning": {
|
|
"input_cost_per_token": 5.9e-07,
|
|
"litellm_provider": "groq",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.9e-07,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": false,
|
|
"supports_tool_choice": true
|
|
},
|
|
"groq/llama-3.1-70b-versatile": {
|
|
"deprecation_date": "2025-01-24",
|
|
"input_cost_per_token": 5.9e-07,
|
|
"litellm_provider": "groq",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.9e-07,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": false,
|
|
"supports_tool_choice": true
|
|
},
|
|
"groq/llama-3.1-8b-instant": {
|
|
"input_cost_per_token": 5e-08,
|
|
"litellm_provider": "groq",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8e-08,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": false,
|
|
"supports_tool_choice": true
|
|
},
|
|
"groq/llama-3.2-11b-text-preview": {
|
|
"deprecation_date": "2024-10-28",
|
|
"input_cost_per_token": 1.8e-07,
|
|
"litellm_provider": "groq",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.8e-07,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": false,
|
|
"supports_tool_choice": true
|
|
},
|
|
"groq/llama-3.2-11b-vision-preview": {
|
|
"deprecation_date": "2025-04-14",
|
|
"input_cost_per_token": 1.8e-07,
|
|
"litellm_provider": "groq",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.8e-07,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": false,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"groq/llama-3.2-1b-preview": {
|
|
"deprecation_date": "2025-04-14",
|
|
"input_cost_per_token": 4e-08,
|
|
"litellm_provider": "groq",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-08,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": false,
|
|
"supports_tool_choice": true
|
|
},
|
|
"groq/llama-3.2-3b-preview": {
|
|
"deprecation_date": "2025-04-14",
|
|
"input_cost_per_token": 6e-08,
|
|
"litellm_provider": "groq",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-08,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": false,
|
|
"supports_tool_choice": true
|
|
},
|
|
"groq/llama-3.2-90b-text-preview": {
|
|
"deprecation_date": "2024-11-25",
|
|
"input_cost_per_token": 9e-07,
|
|
"litellm_provider": "groq",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 9e-07,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": false,
|
|
"supports_tool_choice": true
|
|
},
|
|
"groq/llama-3.2-90b-vision-preview": {
|
|
"deprecation_date": "2025-04-14",
|
|
"input_cost_per_token": 9e-07,
|
|
"litellm_provider": "groq",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 9e-07,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": false,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"groq/llama-3.3-70b-specdec": {
|
|
"deprecation_date": "2025-04-14",
|
|
"input_cost_per_token": 5.9e-07,
|
|
"litellm_provider": "groq",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 9.9e-07,
|
|
"supports_tool_choice": true
|
|
},
|
|
"groq/llama-3.3-70b-versatile": {
|
|
"input_cost_per_token": 5.9e-07,
|
|
"litellm_provider": "groq",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.9e-07,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": false,
|
|
"supports_tool_choice": true
|
|
},
|
|
"groq/llama-guard-3-8b": {
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "groq",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-07
|
|
},
|
|
"groq/llama2-70b-4096": {
|
|
"input_cost_per_token": 7e-07,
|
|
"litellm_provider": "groq",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8e-07,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": false,
|
|
"supports_tool_choice": true
|
|
},
|
|
"groq/llama3-groq-70b-8192-tool-use-preview": {
|
|
"deprecation_date": "2025-01-06",
|
|
"input_cost_per_token": 8.9e-07,
|
|
"litellm_provider": "groq",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8.9e-07,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": false,
|
|
"supports_tool_choice": true
|
|
},
|
|
"groq/llama3-groq-8b-8192-tool-use-preview": {
|
|
"deprecation_date": "2025-01-06",
|
|
"input_cost_per_token": 1.9e-07,
|
|
"litellm_provider": "groq",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.9e-07,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": false,
|
|
"supports_tool_choice": true
|
|
},
|
|
"groq/meta-llama/llama-4-maverick-17b-128e-instruct": {
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "groq",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-07,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"groq/meta-llama/llama-4-scout-17b-16e-instruct": {
|
|
"input_cost_per_token": 1.1e-07,
|
|
"litellm_provider": "groq",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3.4e-07,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"groq/mistral-saba-24b": {
|
|
"input_cost_per_token": 7.9e-07,
|
|
"litellm_provider": "groq",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 32000,
|
|
"max_tokens": 32000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.9e-07
|
|
},
|
|
"groq/mixtral-8x7b-32768": {
|
|
"deprecation_date": "2025-03-20",
|
|
"input_cost_per_token": 2.4e-07,
|
|
"litellm_provider": "groq",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.4e-07,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": false,
|
|
"supports_tool_choice": true
|
|
},
|
|
"groq/moonshotai/kimi-k2-instruct": {
|
|
"input_cost_per_token": 1e-06,
|
|
"litellm_provider": "groq",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-06,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"groq/moonshotai/kimi-k2-instruct-0905": {
"cache_read_input_token_cost": 5e-07,
"input_cost_per_token": 1e-06,
"litellm_provider": "groq",
"max_input_tokens": 262144,
"max_output_tokens": 16384,
"max_tokens": 278528,
"mode": "chat",
"output_cost_per_token": 3e-06,
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"groq/openai/gpt-oss-120b": {
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "groq",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 32766,
|
|
"max_tokens": 32766,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.5e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_web_search": true
|
|
},
|
|
"groq/openai/gpt-oss-20b": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "groq",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_web_search": true
|
|
},
|
|
"groq/playai-tts": {
|
|
"input_cost_per_character": 5e-05,
|
|
"litellm_provider": "groq",
|
|
"max_input_tokens": 10000,
|
|
"max_output_tokens": 10000,
|
|
"max_tokens": 10000,
|
|
"mode": "audio_speech"
|
|
},
|
|
"groq/qwen/qwen3-32b": {
|
|
"input_cost_per_token": 2.9e-07,
|
|
"litellm_provider": "groq",
|
|
"max_input_tokens": 131000,
|
|
"max_output_tokens": 131000,
|
|
"max_tokens": 131000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5.9e-07,
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": false,
|
|
"supports_tool_choice": true
|
|
},
|
|
"groq/whisper-large-v3": {
|
|
"input_cost_per_second": 3.083e-05,
|
|
"litellm_provider": "groq",
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_second": 0.0
|
|
},
|
|
"groq/whisper-large-v3-turbo": {
|
|
"input_cost_per_second": 1.111e-05,
|
|
"litellm_provider": "groq",
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_second": 0.0
|
|
},
|
|
"hd/1024-x-1024/dall-e-3": {
|
|
"input_cost_per_pixel": 7.629e-08,
|
|
"litellm_provider": "openai",
|
|
"mode": "image_generation",
|
|
"output_cost_per_pixel": 0.0
|
|
},
|
|
"hd/1024-x-1792/dall-e-3": {
|
|
"input_cost_per_pixel": 6.539e-08,
|
|
"litellm_provider": "openai",
|
|
"mode": "image_generation",
|
|
"output_cost_per_pixel": 0.0
|
|
},
|
|
"hd/1792-x-1024/dall-e-3": {
|
|
"input_cost_per_pixel": 6.539e-08,
|
|
"litellm_provider": "openai",
|
|
"mode": "image_generation",
|
|
"output_cost_per_pixel": 0.0
|
|
},
|
|
"heroku/claude-3-5-haiku": {
|
|
"litellm_provider": "heroku",
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"supports_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"heroku/claude-3-5-sonnet-latest": {
|
|
"litellm_provider": "heroku",
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"supports_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"heroku/claude-3-7-sonnet": {
|
|
"litellm_provider": "heroku",
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"supports_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"heroku/claude-4-sonnet": {
|
|
"litellm_provider": "heroku",
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"supports_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"high/1024-x-1024/gpt-image-1": {
|
|
"input_cost_per_pixel": 1.59263611e-07,
|
|
"litellm_provider": "openai",
|
|
"mode": "image_generation",
|
|
"output_cost_per_pixel": 0.0,
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"high/1024-x-1536/gpt-image-1": {
|
|
"input_cost_per_pixel": 1.58945719e-07,
|
|
"litellm_provider": "openai",
|
|
"mode": "image_generation",
|
|
"output_cost_per_pixel": 0.0,
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"high/1536-x-1024/gpt-image-1": {
|
|
"input_cost_per_pixel": 1.58945719e-07,
|
|
"litellm_provider": "openai",
|
|
"mode": "image_generation",
|
|
"output_cost_per_pixel": 0.0,
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"hyperbolic/NousResearch/Hermes-3-Llama-3.1-70B": {
|
|
"input_cost_per_token": 1.2e-07,
|
|
"litellm_provider": "hyperbolic",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"hyperbolic/Qwen/QwQ-32B": {
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "hyperbolic",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"hyperbolic/Qwen/Qwen2.5-72B-Instruct": {
|
|
"input_cost_per_token": 1.2e-07,
|
|
"litellm_provider": "hyperbolic",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"hyperbolic/Qwen/Qwen2.5-Coder-32B-Instruct": {
|
|
"input_cost_per_token": 1.2e-07,
|
|
"litellm_provider": "hyperbolic",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"hyperbolic/Qwen/Qwen3-235B-A22B": {
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "hyperbolic",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-06,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"hyperbolic/deepseek-ai/DeepSeek-R1": {
|
|
"input_cost_per_token": 4e-07,
|
|
"litellm_provider": "hyperbolic",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"hyperbolic/deepseek-ai/DeepSeek-R1-0528": {
|
|
"input_cost_per_token": 2.5e-07,
|
|
"litellm_provider": "hyperbolic",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.5e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"hyperbolic/deepseek-ai/DeepSeek-V3": {
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "hyperbolic",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"hyperbolic/deepseek-ai/DeepSeek-V3-0324": {
|
|
"input_cost_per_token": 4e-07,
|
|
"litellm_provider": "hyperbolic",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"hyperbolic/meta-llama/Llama-3.2-3B-Instruct": {
|
|
"input_cost_per_token": 1.2e-07,
|
|
"litellm_provider": "hyperbolic",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"hyperbolic/meta-llama/Llama-3.3-70B-Instruct": {
|
|
"input_cost_per_token": 1.2e-07,
|
|
"litellm_provider": "hyperbolic",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"hyperbolic/meta-llama/Meta-Llama-3-70B-Instruct": {
|
|
"input_cost_per_token": 1.2e-07,
|
|
"litellm_provider": "hyperbolic",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"hyperbolic/meta-llama/Meta-Llama-3.1-405B-Instruct": {
|
|
"input_cost_per_token": 1.2e-07,
|
|
"litellm_provider": "hyperbolic",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"hyperbolic/meta-llama/Meta-Llama-3.1-70B-Instruct": {
|
|
"input_cost_per_token": 1.2e-07,
|
|
"litellm_provider": "hyperbolic",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"hyperbolic/meta-llama/Meta-Llama-3.1-8B-Instruct": {
|
|
"input_cost_per_token": 1.2e-07,
|
|
"litellm_provider": "hyperbolic",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"hyperbolic/moonshotai/Kimi-K2-Instruct": {
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "hyperbolic",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-06,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"j2-light": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "ai21",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "completion",
|
|
"output_cost_per_token": 3e-06
|
|
},
|
|
"j2-mid": {
|
|
"input_cost_per_token": 1e-05,
|
|
"litellm_provider": "ai21",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "completion",
|
|
"output_cost_per_token": 1e-05
|
|
},
|
|
"j2-ultra": {
|
|
"input_cost_per_token": 1.5e-05,
|
|
"litellm_provider": "ai21",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "completion",
|
|
"output_cost_per_token": 1.5e-05
|
|
},
|
|
"jamba-1.5": {
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "ai21",
|
|
"max_input_tokens": 256000,
|
|
"max_output_tokens": 256000,
|
|
"max_tokens": 256000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-07,
|
|
"supports_tool_choice": true
|
|
},
|
|
"jamba-1.5-large": {
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "ai21",
|
|
"max_input_tokens": 256000,
|
|
"max_output_tokens": 256000,
|
|
"max_tokens": 256000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8e-06,
|
|
"supports_tool_choice": true
|
|
},
|
|
"jamba-1.5-large@001": {
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "ai21",
|
|
"max_input_tokens": 256000,
|
|
"max_output_tokens": 256000,
|
|
"max_tokens": 256000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8e-06,
|
|
"supports_tool_choice": true
|
|
},
|
|
"jamba-1.5-mini": {
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "ai21",
|
|
"max_input_tokens": 256000,
|
|
"max_output_tokens": 256000,
|
|
"max_tokens": 256000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-07,
|
|
"supports_tool_choice": true
|
|
},
|
|
"jamba-1.5-mini@001": {
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "ai21",
|
|
"max_input_tokens": 256000,
|
|
"max_output_tokens": 256000,
|
|
"max_tokens": 256000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-07,
|
|
"supports_tool_choice": true
|
|
},
|
|
"jamba-large-1.6": {
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "ai21",
|
|
"max_input_tokens": 256000,
|
|
"max_output_tokens": 256000,
|
|
"max_tokens": 256000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8e-06,
|
|
"supports_tool_choice": true
|
|
},
|
|
"jamba-large-1.7": {
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "ai21",
|
|
"max_input_tokens": 256000,
|
|
"max_output_tokens": 256000,
|
|
"max_tokens": 256000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8e-06,
|
|
"supports_tool_choice": true
|
|
},
|
|
"jamba-mini-1.6": {
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "ai21",
|
|
"max_input_tokens": 256000,
|
|
"max_output_tokens": 256000,
|
|
"max_tokens": 256000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-07,
|
|
"supports_tool_choice": true
|
|
},
|
|
"jamba-mini-1.7": {
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "ai21",
|
|
"max_input_tokens": 256000,
|
|
"max_output_tokens": 256000,
|
|
"max_tokens": 256000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-07,
|
|
"supports_tool_choice": true
|
|
},
|
|
"jina-reranker-v2-base-multilingual": {
|
|
"input_cost_per_token": 1.8e-08,
|
|
"litellm_provider": "jina_ai",
|
|
"max_document_chunks_per_query": 2048,
|
|
"max_input_tokens": 1024,
|
|
"max_output_tokens": 1024,
|
|
"max_tokens": 1024,
|
|
"mode": "rerank",
|
|
"output_cost_per_token": 1.8e-08
|
|
},
|
|
"jp.anthropic.claude-sonnet-4-5-20250929-v1:0": {
"cache_creation_input_token_cost": 4.125e-06,
"cache_creation_input_token_cost_above_200k_tokens": 8.25e-06,
"cache_read_input_token_cost": 3.3e-07,
"cache_read_input_token_cost_above_200k_tokens": 6.6e-07,
"input_cost_per_token": 3.3e-06,
"input_cost_per_token_above_200k_tokens": 6.6e-06,
"litellm_provider": "bedrock_converse",
"max_input_tokens": 200000,
"max_output_tokens": 64000,
"max_tokens": 64000,
"mode": "chat",
"output_cost_per_token": 1.65e-05,
"output_cost_per_token_above_200k_tokens": 2.475e-05,
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.01,
|
|
"search_context_size_low": 0.01,
|
|
"search_context_size_medium": 0.01
|
|
},
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 346
|
|
},
|
|
"jp.anthropic.claude-haiku-4-5-20251001-v1:0": {
|
|
"cache_creation_input_token_cost": 1.375e-06,
|
|
"cache_read_input_token_cost": 1.1e-07,
|
|
"input_cost_per_token": 1.1e-06,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 64000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5.5e-06,
|
|
"source": "https://aws.amazon.com/about-aws/whats-new/2025/10/claude-4-5-haiku-anthropic-amazon-bedrock",
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 346
|
|
},
|
|
"lambda_ai/deepseek-llama3.3-70b": {
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "lambda_ai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"lambda_ai/deepseek-r1-0528": {
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "lambda_ai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"lambda_ai/deepseek-r1-671b": {
|
|
"input_cost_per_token": 8e-07,
|
|
"litellm_provider": "lambda_ai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"lambda_ai/deepseek-v3-0324": {
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "lambda_ai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"lambda_ai/hermes3-405b": {
|
|
"input_cost_per_token": 8e-07,
|
|
"litellm_provider": "lambda_ai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"lambda_ai/hermes3-70b": {
|
|
"input_cost_per_token": 1.2e-07,
|
|
"litellm_provider": "lambda_ai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"lambda_ai/hermes3-8b": {
|
|
"input_cost_per_token": 2.5e-08,
|
|
"litellm_provider": "lambda_ai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-08,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"lambda_ai/lfm-40b": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "lambda_ai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"lambda_ai/lfm-7b": {
|
|
"input_cost_per_token": 2.5e-08,
|
|
"litellm_provider": "lambda_ai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-08,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"lambda_ai/llama-4-maverick-17b-128e-instruct-fp8": {
|
|
"input_cost_per_token": 5e-08,
|
|
"litellm_provider": "lambda_ai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"lambda_ai/llama-4-scout-17b-16e-instruct": {
|
|
"input_cost_per_token": 5e-08,
|
|
"litellm_provider": "lambda_ai",
|
|
"max_input_tokens": 16384,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"lambda_ai/llama3.1-405b-instruct-fp8": {
|
|
"input_cost_per_token": 8e-07,
|
|
"litellm_provider": "lambda_ai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"lambda_ai/llama3.1-70b-instruct-fp8": {
|
|
"input_cost_per_token": 1.2e-07,
|
|
"litellm_provider": "lambda_ai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"lambda_ai/llama3.1-8b-instruct": {
|
|
"input_cost_per_token": 2.5e-08,
|
|
"litellm_provider": "lambda_ai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-08,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"lambda_ai/llama3.1-nemotron-70b-instruct-fp8": {
|
|
"input_cost_per_token": 1.2e-07,
|
|
"litellm_provider": "lambda_ai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"lambda_ai/llama3.2-11b-vision-instruct": {
|
|
"input_cost_per_token": 1.5e-08,
|
|
"litellm_provider": "lambda_ai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.5e-08,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"lambda_ai/llama3.2-3b-instruct": {
|
|
"input_cost_per_token": 1.5e-08,
|
|
"litellm_provider": "lambda_ai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.5e-08,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"lambda_ai/llama3.3-70b-instruct-fp8": {
|
|
"input_cost_per_token": 1.2e-07,
|
|
"litellm_provider": "lambda_ai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"lambda_ai/qwen25-coder-32b-instruct": {
|
|
"input_cost_per_token": 5e-08,
|
|
"litellm_provider": "lambda_ai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"lambda_ai/qwen3-32b-fp8": {
|
|
"input_cost_per_token": 5e-08,
|
|
"litellm_provider": "lambda_ai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"low/1024-x-1024/gpt-image-1": {
|
|
"input_cost_per_pixel": 1.0490417e-08,
|
|
"litellm_provider": "openai",
|
|
"mode": "image_generation",
|
|
"output_cost_per_pixel": 0.0,
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"low/1024-x-1536/gpt-image-1": {
|
|
"input_cost_per_pixel": 1.0172526e-08,
|
|
"litellm_provider": "openai",
|
|
"mode": "image_generation",
|
|
"output_cost_per_pixel": 0.0,
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"low/1536-x-1024/gpt-image-1": {
|
|
"input_cost_per_pixel": 1.0172526e-08,
|
|
"litellm_provider": "openai",
|
|
"mode": "image_generation",
|
|
"output_cost_per_pixel": 0.0,
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"luminous-base": {
|
|
"input_cost_per_token": 3e-05,
|
|
"litellm_provider": "aleph_alpha",
|
|
"max_tokens": 2048,
|
|
"mode": "completion",
|
|
"output_cost_per_token": 3.3e-05
|
|
},
|
|
"luminous-base-control": {
|
|
"input_cost_per_token": 3.75e-05,
|
|
"litellm_provider": "aleph_alpha",
|
|
"max_tokens": 2048,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4.125e-05
|
|
},
|
|
"luminous-extended": {
|
|
"input_cost_per_token": 4.5e-05,
|
|
"litellm_provider": "aleph_alpha",
|
|
"max_tokens": 2048,
|
|
"mode": "completion",
|
|
"output_cost_per_token": 4.95e-05
|
|
},
|
|
"luminous-extended-control": {
|
|
"input_cost_per_token": 5.625e-05,
|
|
"litellm_provider": "aleph_alpha",
|
|
"max_tokens": 2048,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6.1875e-05
|
|
},
|
|
"luminous-supreme": {
|
|
"input_cost_per_token": 0.000175,
|
|
"litellm_provider": "aleph_alpha",
|
|
"max_tokens": 2048,
|
|
"mode": "completion",
|
|
"output_cost_per_token": 0.0001925
|
|
},
|
|
"luminous-supreme-control": {
|
|
"input_cost_per_token": 0.00021875,
|
|
"litellm_provider": "aleph_alpha",
|
|
"max_tokens": 2048,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.000240625
|
|
},
|
|
"max-x-max/50-steps/stability.stable-diffusion-xl-v0": {
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 77,
|
|
"max_tokens": 77,
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.036
|
|
},
|
|
"max-x-max/max-steps/stability.stable-diffusion-xl-v0": {
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 77,
|
|
"max_tokens": 77,
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.072
|
|
},
|
|
"medium/1024-x-1024/gpt-image-1": {
|
|
"input_cost_per_pixel": 4.0054321e-08,
|
|
"litellm_provider": "openai",
|
|
"mode": "image_generation",
|
|
"output_cost_per_pixel": 0.0,
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"medium/1024-x-1536/gpt-image-1": {
|
|
"input_cost_per_pixel": 4.0054321e-08,
|
|
"litellm_provider": "openai",
|
|
"mode": "image_generation",
|
|
"output_cost_per_pixel": 0.0,
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"medium/1536-x-1024/gpt-image-1": {
|
|
"input_cost_per_pixel": 4.0054321e-08,
|
|
"litellm_provider": "openai",
|
|
"mode": "image_generation",
|
|
"output_cost_per_pixel": 0.0,
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"low/1024-x-1024/gpt-image-1-mini": {
|
|
"input_cost_per_image": 0.005,
|
|
"litellm_provider": "openai",
|
|
"mode": "image_generation",
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"low/1024-x-1536/gpt-image-1-mini": {
|
|
"input_cost_per_image": 0.006,
|
|
"litellm_provider": "openai",
|
|
"mode": "image_generation",
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"low/1536-x-1024/gpt-image-1-mini": {
|
|
"input_cost_per_image": 0.006,
|
|
"litellm_provider": "openai",
|
|
"mode": "image_generation",
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"medium/1024-x-1024/gpt-image-1-mini": {
|
|
"input_cost_per_image": 0.011,
|
|
"litellm_provider": "openai",
|
|
"mode": "image_generation",
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"medium/1024-x-1536/gpt-image-1-mini": {
|
|
"input_cost_per_image": 0.015,
|
|
"litellm_provider": "openai",
|
|
"mode": "image_generation",
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"medium/1536-x-1024/gpt-image-1-mini": {
|
|
"input_cost_per_image": 0.015,
|
|
"litellm_provider": "openai",
|
|
"mode": "image_generation",
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"medlm-large": {
|
|
"input_cost_per_character": 5e-06,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 1024,
|
|
"max_tokens": 1024,
|
|
"mode": "chat",
|
|
"output_cost_per_character": 1.5e-05,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models",
|
|
"supports_tool_choice": true
|
|
},
|
|
"medlm-medium": {
|
|
"input_cost_per_character": 5e-07,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_character": 1e-06,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models",
|
|
"supports_tool_choice": true
|
|
},
|
|
"meta.llama2-13b-chat-v1": {
|
|
"input_cost_per_token": 7.5e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-06
|
|
},
|
|
"meta.llama2-70b-chat-v1": {
|
|
"input_cost_per_token": 1.95e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.56e-06
|
|
},
|
|
"meta.llama3-1-405b-instruct-v1:0": {
|
|
"input_cost_per_token": 5.32e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.6e-05,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": false
|
|
},
|
|
"meta.llama3-1-70b-instruct-v1:0": {
|
|
"input_cost_per_token": 9.9e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 2048,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 9.9e-07,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": false
|
|
},
|
|
"meta.llama3-1-8b-instruct-v1:0": {
|
|
"input_cost_per_token": 2.2e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 2048,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.2e-07,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": false
|
|
},
|
|
"meta.llama3-2-11b-instruct-v1:0": {
|
|
"input_cost_per_token": 3.5e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3.5e-07,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": false,
|
|
"supports_vision": true
|
|
},
|
|
"meta.llama3-2-1b-instruct-v1:0": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-07,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": false
|
|
},
|
|
"meta.llama3-2-3b-instruct-v1:0": {
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-07,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": false
|
|
},
|
|
"meta.llama3-2-90b-instruct-v1:0": {
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-06,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": false,
|
|
"supports_vision": true
|
|
},
|
|
"meta.llama3-3-70b-instruct-v1:0": {
|
|
"input_cost_per_token": 7.2e-07,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.2e-07,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": false
|
|
},
|
|
"meta.llama3-70b-instruct-v1:0": {
|
|
"input_cost_per_token": 2.65e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3.5e-06
|
|
},
|
|
"meta.llama3-8b-instruct-v1:0": {
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-07
|
|
},
|
|
"meta.llama4-maverick-17b-instruct-v1:0": {
|
|
"input_cost_per_token": 2.4e-07,
|
|
"input_cost_per_token_batches": 1.2e-07,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 9.7e-07,
|
|
"output_cost_per_token_batches": 4.85e-07,
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"code"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": false
|
|
},
|
|
"meta.llama4-scout-17b-instruct-v1:0": {
|
|
"input_cost_per_token": 1.7e-07,
|
|
"input_cost_per_token_batches": 8.5e-08,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6.6e-07,
|
|
"output_cost_per_token_batches": 3.3e-07,
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"code"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": false
|
|
},
|
|
"meta_llama/Llama-3.3-70B-Instruct": {
|
|
"litellm_provider": "meta_llama",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4028,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"source": "https://llama.developer.meta.com/docs/models",
|
|
"supported_modalities": [
|
|
"text"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"meta_llama/Llama-3.3-8B-Instruct": {
|
|
"litellm_provider": "meta_llama",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4028,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"source": "https://llama.developer.meta.com/docs/models",
|
|
"supported_modalities": [
|
|
"text"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"meta_llama/Llama-4-Maverick-17B-128E-Instruct-FP8": {
|
|
"litellm_provider": "meta_llama",
|
|
"max_input_tokens": 1000000,
|
|
"max_output_tokens": 4028,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"source": "https://llama.developer.meta.com/docs/models",
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"meta_llama/Llama-4-Scout-17B-16E-Instruct-FP8": {
|
|
"litellm_provider": "meta_llama",
|
|
"max_input_tokens": 10000000,
|
|
"max_output_tokens": 4028,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"source": "https://llama.developer.meta.com/docs/models",
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"mistral.mistral-7b-instruct-v0:2": {
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-07,
|
|
"supports_tool_choice": true
|
|
},
|
|
"mistral.mistral-large-2402-v1:0": {
|
|
"input_cost_per_token": 8e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.4e-05,
|
|
"supports_function_calling": true
|
|
},
|
|
"mistral.mistral-large-2407-v1:0": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 9e-06,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"mistral.mistral-small-2402-v1:0": {
|
|
"input_cost_per_token": 1e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-06,
|
|
"supports_function_calling": true
|
|
},
|
|
"mistral.mixtral-8x7b-instruct-v0:1": {
|
|
"input_cost_per_token": 4.5e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7e-07,
|
|
"supports_tool_choice": true
|
|
},
|
|
"mistral/codestral-2405": {
|
|
"input_cost_per_token": 1e-06,
|
|
"litellm_provider": "mistral",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-06,
|
|
"supports_assistant_prefill": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"mistral/codestral-latest": {
|
|
"input_cost_per_token": 1e-06,
|
|
"litellm_provider": "mistral",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-06,
|
|
"supports_assistant_prefill": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"mistral/codestral-mamba-latest": {
|
|
"input_cost_per_token": 2.5e-07,
|
|
"litellm_provider": "mistral",
|
|
"max_input_tokens": 256000,
|
|
"max_output_tokens": 256000,
|
|
"max_tokens": 256000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.5e-07,
|
|
"source": "https://mistral.ai/technology/",
|
|
"supports_assistant_prefill": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"mistral/devstral-medium-2507": {
|
|
"input_cost_per_token": 4e-07,
|
|
"litellm_provider": "mistral",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-06,
|
|
"source": "https://mistral.ai/news/devstral",
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"mistral/devstral-small-2505": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "mistral",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-07,
|
|
"source": "https://mistral.ai/news/devstral",
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"mistral/devstral-small-2507": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "mistral",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-07,
|
|
"source": "https://mistral.ai/news/devstral",
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"mistral/magistral-medium-2506": {
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "mistral",
|
|
"max_input_tokens": 40000,
|
|
"max_output_tokens": 40000,
|
|
"max_tokens": 40000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5e-06,
|
|
"source": "https://mistral.ai/news/magistral",
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"mistral/magistral-medium-2509": {
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "mistral",
|
|
"max_input_tokens": 40000,
|
|
"max_output_tokens": 40000,
|
|
"max_tokens": 40000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5e-06,
|
|
"source": "https://mistral.ai/news/magistral",
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"mistral/mistral-ocr-latest": {
|
|
"litellm_provider": "mistral",
|
|
"ocr_cost_per_page": 1e-3,
|
|
"annotation_cost_per_page": 3e-3,
|
|
"mode": "ocr",
|
|
"supported_endpoints": [
|
|
"/v1/ocr"
|
|
],
|
|
"source": "https://mistral.ai/pricing#api-pricing"
|
|
},
|
|
"mistral/mistral-ocr-2505-completion": {
|
|
"litellm_provider": "mistral",
|
|
"ocr_cost_per_page": 1e-3,
|
|
"annotation_cost_per_page": 3e-3,
|
|
"mode": "ocr",
|
|
"supported_endpoints": [
|
|
"/v1/ocr"
|
|
],
|
|
"source": "https://mistral.ai/pricing#api-pricing"
|
|
},
|
|
"mistral/magistral-medium-latest": {
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "mistral",
|
|
"max_input_tokens": 40000,
|
|
"max_output_tokens": 40000,
|
|
"max_tokens": 40000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5e-06,
|
|
"source": "https://mistral.ai/news/magistral",
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"mistral/magistral-small-2506": {
|
|
"input_cost_per_token": 5e-07,
|
|
"litellm_provider": "mistral",
|
|
"max_input_tokens": 40000,
|
|
"max_output_tokens": 40000,
|
|
"max_tokens": 40000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-06,
|
|
"source": "https://mistral.ai/pricing#api-pricing",
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"mistral/magistral-small-latest": {
|
|
"input_cost_per_token": 5e-07,
|
|
"litellm_provider": "mistral",
|
|
"max_input_tokens": 40000,
|
|
"max_output_tokens": 40000,
|
|
"max_tokens": 40000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-06,
|
|
"source": "https://mistral.ai/pricing#api-pricing",
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"mistral/mistral-embed": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "mistral",
|
|
"max_input_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "embedding"
|
|
},
|
|
"mistral/codestral-embed": {
|
|
"input_cost_per_token": 0.15e-06,
|
|
"litellm_provider": "mistral",
|
|
"max_input_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "embedding"
|
|
},
|
|
"mistral/codestral-embed-2505": {
|
|
"input_cost_per_token": 0.15e-06,
|
|
"litellm_provider": "mistral",
|
|
"max_input_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "embedding"
|
|
},
|
|
"mistral/mistral-large-2402": {
|
|
"input_cost_per_token": 4e-06,
|
|
"litellm_provider": "mistral",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.2e-05,
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"mistral/mistral-large-2407": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "mistral",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 9e-06,
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"mistral/mistral-large-2411": {
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "mistral",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-06,
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"mistral/mistral-large-latest": {
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "mistral",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-06,
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"mistral/mistral-large-3": {
|
|
"input_cost_per_token": 5e-07,
|
|
"litellm_provider": "mistral",
|
|
"max_input_tokens": 256000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-06,
|
|
"source": "https://docs.mistral.ai/models/mistral-large-3-25-12",
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"mistral/mistral-medium": {
|
|
"input_cost_per_token": 2.7e-06,
|
|
"litellm_provider": "mistral",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8.1e-06,
|
|
"supports_assistant_prefill": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"mistral/mistral-medium-2312": {
|
|
"input_cost_per_token": 2.7e-06,
|
|
"litellm_provider": "mistral",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8.1e-06,
|
|
"supports_assistant_prefill": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"mistral/mistral-medium-2505": {
|
|
"input_cost_per_token": 4e-07,
|
|
"litellm_provider": "mistral",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-06,
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"mistral/mistral-medium-latest": {
|
|
"input_cost_per_token": 4e-07,
|
|
"litellm_provider": "mistral",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-06,
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"mistral/mistral-small": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "mistral",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-07,
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"mistral/mistral-small-latest": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "mistral",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-07,
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"mistral/mistral-tiny": {
|
|
"input_cost_per_token": 2.5e-07,
|
|
"litellm_provider": "mistral",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.5e-07,
|
|
"supports_assistant_prefill": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"mistral/open-codestral-mamba": {
|
|
"input_cost_per_token": 2.5e-07,
|
|
"litellm_provider": "mistral",
|
|
"max_input_tokens": 256000,
|
|
"max_output_tokens": 256000,
|
|
"max_tokens": 256000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.5e-07,
|
|
"source": "https://mistral.ai/technology/",
|
|
"supports_assistant_prefill": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"mistral/open-mistral-7b": {
|
|
"input_cost_per_token": 2.5e-07,
|
|
"litellm_provider": "mistral",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.5e-07,
|
|
"supports_assistant_prefill": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"mistral/open-mistral-nemo": {
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "mistral",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-07,
|
|
"source": "https://mistral.ai/technology/",
|
|
"supports_assistant_prefill": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"mistral/open-mistral-nemo-2407": {
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "mistral",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-07,
|
|
"source": "https://mistral.ai/technology/",
|
|
"supports_assistant_prefill": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"mistral/open-mixtral-8x22b": {
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "mistral",
|
|
"max_input_tokens": 65336,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-06,
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"mistral/open-mixtral-8x7b": {
|
|
"input_cost_per_token": 7e-07,
|
|
"litellm_provider": "mistral",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7e-07,
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"mistral/pixtral-12b-2409": {
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "mistral",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-07,
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"mistral/pixtral-large-2411": {
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "mistral",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-06,
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"mistral/pixtral-large-latest": {
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "mistral",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-06,
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"moonshot/kimi-k2-0711-preview": {
|
|
"cache_read_input_token_cost": 1.5e-07,
|
|
"input_cost_per_token": 6e-07,
|
|
"litellm_provider": "moonshot",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.5e-06,
|
|
"source": "https://platform.moonshot.ai/docs/pricing/chat#generation-model-kimi-k2",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_web_search": true
|
|
},
|
|
"moonshot/kimi-k2-0905-preview": {
|
|
"cache_read_input_token_cost": 1.5e-07,
|
|
"input_cost_per_token": 6e-07,
|
|
"litellm_provider": "moonshot",
|
|
"max_input_tokens": 262144,
|
|
"max_output_tokens": 262144,
|
|
"max_tokens": 262144,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.5e-06,
|
|
"source": "https://platform.moonshot.ai/docs/pricing/chat#generation-model-kimi-k2",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_web_search": true
|
|
},
|
|
"moonshot/kimi-k2-turbo-preview": {
|
|
"cache_read_input_token_cost": 1.5e-07,
|
|
"input_cost_per_token": 1.15e-06,
|
|
"litellm_provider": "moonshot",
|
|
"max_input_tokens": 262144,
|
|
"max_output_tokens": 262144,
|
|
"max_tokens": 262144,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8e-06,
|
|
"source": "https://platform.moonshot.ai/docs/pricing/chat#generation-model-kimi-k2",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_web_search": true
|
|
},
|
|
"moonshot/kimi-latest": {
|
|
"cache_read_input_token_cost": 1.5e-07,
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "moonshot",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5e-06,
|
|
"source": "https://platform.moonshot.ai/docs/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"moonshot/kimi-latest-128k": {
|
|
"cache_read_input_token_cost": 1.5e-07,
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "moonshot",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5e-06,
|
|
"source": "https://platform.moonshot.ai/docs/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"moonshot/kimi-latest-32k": {
|
|
"cache_read_input_token_cost": 1.5e-07,
|
|
"input_cost_per_token": 1e-06,
|
|
"litellm_provider": "moonshot",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-06,
|
|
"source": "https://platform.moonshot.ai/docs/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"moonshot/kimi-latest-8k": {
|
|
"cache_read_input_token_cost": 1.5e-07,
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "moonshot",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-06,
|
|
"source": "https://platform.moonshot.ai/docs/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"moonshot/kimi-thinking-preview": {
|
|
"cache_read_input_token_cost": 1.5e-07,
|
|
"input_cost_per_token": 6e-07,
|
|
"litellm_provider": "moonshot",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.5e-06,
|
|
"source": "https://platform.moonshot.ai/docs/pricing/chat#generation-model-kimi-k2",
|
|
"supports_vision": true
|
|
},
|
|
"moonshot/kimi-k2-thinking": {
|
|
"cache_read_input_token_cost": 1.5e-7,
|
|
"input_cost_per_token": 6e-7,
|
|
"litellm_provider": "moonshot",
|
|
"max_input_tokens": 262144,
|
|
"max_output_tokens": 262144,
|
|
"max_tokens": 262144,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.5e-6,
|
|
"source": "https://platform.moonshot.ai/docs/pricing/chat#generation-model-kimi-k2",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_web_search": true
|
|
},
|
|
"moonshot/kimi-k2-thinking-turbo": {
|
|
"cache_read_input_token_cost": 1.5e-7,
|
|
"input_cost_per_token": 1.15e-6,
|
|
"litellm_provider": "moonshot",
|
|
"max_input_tokens": 262144,
|
|
"max_output_tokens": 262144,
|
|
"max_tokens": 262144,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8e-6,
|
|
"source": "https://platform.moonshot.ai/docs/pricing/chat#generation-model-kimi-k2",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_web_search": true
|
|
},
|
|
"moonshot/moonshot-v1-128k": {
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "moonshot",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5e-06,
|
|
"source": "https://platform.moonshot.ai/docs/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"moonshot/moonshot-v1-128k-0430": {
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "moonshot",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5e-06,
|
|
"source": "https://platform.moonshot.ai/docs/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"moonshot/moonshot-v1-128k-vision-preview": {
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "moonshot",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5e-06,
|
|
"source": "https://platform.moonshot.ai/docs/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"moonshot/moonshot-v1-32k": {
|
|
"input_cost_per_token": 1e-06,
|
|
"litellm_provider": "moonshot",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-06,
|
|
"source": "https://platform.moonshot.ai/docs/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"moonshot/moonshot-v1-32k-0430": {
|
|
"input_cost_per_token": 1e-06,
|
|
"litellm_provider": "moonshot",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-06,
|
|
"source": "https://platform.moonshot.ai/docs/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"moonshot/moonshot-v1-32k-vision-preview": {
|
|
"input_cost_per_token": 1e-06,
|
|
"litellm_provider": "moonshot",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-06,
|
|
"source": "https://platform.moonshot.ai/docs/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"moonshot/moonshot-v1-8k": {
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "moonshot",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-06,
|
|
"source": "https://platform.moonshot.ai/docs/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"moonshot/moonshot-v1-8k-0430": {
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "moonshot",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-06,
|
|
"source": "https://platform.moonshot.ai/docs/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"moonshot/moonshot-v1-8k-vision-preview": {
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "moonshot",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-06,
|
|
"source": "https://platform.moonshot.ai/docs/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"moonshot/moonshot-v1-auto": {
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "moonshot",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5e-06,
|
|
"source": "https://platform.moonshot.ai/docs/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"morph/morph-v3-fast": {
|
|
"input_cost_per_token": 8e-07,
|
|
"litellm_provider": "morph",
|
|
"max_input_tokens": 16000,
|
|
"max_output_tokens": 16000,
|
|
"max_tokens": 16000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.2e-06,
|
|
"supports_function_calling": false,
|
|
"supports_parallel_function_calling": false,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": false,
|
|
"supports_vision": false
|
|
},
|
|
"morph/morph-v3-large": {
|
|
"input_cost_per_token": 9e-07,
|
|
"litellm_provider": "morph",
|
|
"max_input_tokens": 16000,
|
|
"max_output_tokens": 16000,
|
|
"max_tokens": 16000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.9e-06,
|
|
"supports_function_calling": false,
|
|
"supports_parallel_function_calling": false,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": false,
|
|
"supports_vision": false
|
|
},
|
|
"multimodalembedding": {
|
|
"input_cost_per_character": 2e-07,
|
|
"input_cost_per_image": 0.0001,
|
|
"input_cost_per_token": 8e-07,
|
|
"input_cost_per_video_per_second": 0.0005,
|
|
"input_cost_per_video_per_second_above_15s_interval": 0.002,
|
|
"input_cost_per_video_per_second_above_8s_interval": 0.001,
|
|
"litellm_provider": "vertex_ai-embedding-models",
|
|
"max_input_tokens": 2048,
|
|
"max_tokens": 2048,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0,
|
|
"output_vector_size": 768,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models",
|
|
"supported_endpoints": [
|
|
"/v1/embeddings"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"video"
|
|
]
|
|
},
|
|
"multimodalembedding@001": {
|
|
"input_cost_per_character": 2e-07,
|
|
"input_cost_per_image": 0.0001,
|
|
"input_cost_per_token": 8e-07,
|
|
"input_cost_per_video_per_second": 0.0005,
|
|
"input_cost_per_video_per_second_above_15s_interval": 0.002,
|
|
"input_cost_per_video_per_second_above_8s_interval": 0.001,
|
|
"litellm_provider": "vertex_ai-embedding-models",
|
|
"max_input_tokens": 2048,
|
|
"max_tokens": 2048,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0,
|
|
"output_vector_size": 768,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models",
|
|
"supported_endpoints": [
|
|
"/v1/embeddings"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"video"
|
|
]
|
|
},
|
|
"nscale/Qwen/QwQ-32B": {
|
|
"input_cost_per_token": 1.8e-07,
|
|
"litellm_provider": "nscale",
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-07,
|
|
"source": "https://docs.nscale.com/docs/inference/serverless-models/current#chat-models"
|
|
},
|
|
"nscale/Qwen/Qwen2.5-Coder-32B-Instruct": {
|
|
"input_cost_per_token": 6e-08,
|
|
"litellm_provider": "nscale",
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-07,
|
|
"source": "https://docs.nscale.com/docs/inference/serverless-models/current#chat-models"
|
|
},
|
|
"nscale/Qwen/Qwen2.5-Coder-3B-Instruct": {
|
|
"input_cost_per_token": 1e-08,
|
|
"litellm_provider": "nscale",
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-08,
|
|
"source": "https://docs.nscale.com/docs/inference/serverless-models/current#chat-models"
|
|
},
|
|
"nscale/Qwen/Qwen2.5-Coder-7B-Instruct": {
|
|
"input_cost_per_token": 1e-08,
|
|
"litellm_provider": "nscale",
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-08,
|
|
"source": "https://docs.nscale.com/docs/inference/serverless-models/current#chat-models"
|
|
},
|
|
"nscale/black-forest-labs/FLUX.1-schnell": {
|
|
"input_cost_per_pixel": 1.3e-09,
|
|
"litellm_provider": "nscale",
|
|
"mode": "image_generation",
|
|
"output_cost_per_pixel": 0.0,
|
|
"source": "https://docs.nscale.com/docs/inference/serverless-models/current#image-models",
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"nscale/deepseek-ai/DeepSeek-R1-Distill-Llama-70B": {
|
|
"input_cost_per_token": 3.75e-07,
|
|
"litellm_provider": "nscale",
|
|
"metadata": {
|
|
"notes": "Pricing listed as $0.75/1M tokens total. Assumed 50/50 split for input/output."
|
|
},
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3.75e-07,
|
|
"source": "https://docs.nscale.com/docs/inference/serverless-models/current#chat-models"
|
|
},
|
|
"nscale/deepseek-ai/DeepSeek-R1-Distill-Llama-8B": {
|
|
"input_cost_per_token": 2.5e-08,
|
|
"litellm_provider": "nscale",
|
|
"metadata": {
|
|
"notes": "Pricing listed as $0.05/1M tokens total. Assumed 50/50 split for input/output."
|
|
},
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.5e-08,
|
|
"source": "https://docs.nscale.com/docs/inference/serverless-models/current#chat-models"
|
|
},
|
|
"nscale/deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B": {
|
|
"input_cost_per_token": 9e-08,
|
|
"litellm_provider": "nscale",
|
|
"metadata": {
|
|
"notes": "Pricing listed as $0.18/1M tokens total. Assumed 50/50 split for input/output."
|
|
},
|
|
"mode": "chat",
|
|
"output_cost_per_token": 9e-08,
|
|
"source": "https://docs.nscale.com/docs/inference/serverless-models/current#chat-models"
|
|
},
|
|
"nscale/deepseek-ai/DeepSeek-R1-Distill-Qwen-14B": {
|
|
"input_cost_per_token": 7e-08,
|
|
"litellm_provider": "nscale",
|
|
"metadata": {
|
|
"notes": "Pricing listed as $0.14/1M tokens total. Assumed 50/50 split for input/output."
|
|
},
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7e-08,
|
|
"source": "https://docs.nscale.com/docs/inference/serverless-models/current#chat-models"
|
|
},
|
|
"nscale/deepseek-ai/DeepSeek-R1-Distill-Qwen-32B": {
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "nscale",
|
|
"metadata": {
|
|
"notes": "Pricing listed as $0.30/1M tokens total. Assumed 50/50 split for input/output."
|
|
},
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-07,
|
|
"source": "https://docs.nscale.com/docs/inference/serverless-models/current#chat-models"
|
|
},
|
|
"nscale/deepseek-ai/DeepSeek-R1-Distill-Qwen-7B": {
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "nscale",
|
|
"metadata": {
|
|
"notes": "Pricing listed as $0.40/1M tokens total. Assumed 50/50 split for input/output."
|
|
},
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-07,
|
|
"source": "https://docs.nscale.com/docs/inference/serverless-models/current#chat-models"
|
|
},
|
|
"nscale/meta-llama/Llama-3.1-8B-Instruct": {
|
|
"input_cost_per_token": 3e-08,
|
|
"litellm_provider": "nscale",
|
|
"metadata": {
|
|
"notes": "Pricing listed as $0.06/1M tokens total. Assumed 50/50 split for input/output."
|
|
},
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-08,
|
|
"source": "https://docs.nscale.com/docs/inference/serverless-models/current#chat-models"
|
|
},
|
|
"nscale/meta-llama/Llama-3.3-70B-Instruct": {
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "nscale",
|
|
"metadata": {
|
|
"notes": "Pricing listed as $0.40/1M tokens total. Assumed 50/50 split for input/output."
|
|
},
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-07,
|
|
"source": "https://docs.nscale.com/docs/inference/serverless-models/current#chat-models"
|
|
},
|
|
"nscale/meta-llama/Llama-4-Scout-17B-16E-Instruct": {
|
|
"input_cost_per_token": 9e-08,
|
|
"litellm_provider": "nscale",
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.9e-07,
|
|
"source": "https://docs.nscale.com/docs/inference/serverless-models/current#chat-models"
|
|
},
|
|
"nscale/mistralai/mixtral-8x22b-instruct-v0.1": {
|
|
"input_cost_per_token": 6e-07,
|
|
"litellm_provider": "nscale",
|
|
"metadata": {
|
|
"notes": "Pricing listed as $1.20/1M tokens total. Assumed 50/50 split for input/output."
|
|
},
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-07,
|
|
"source": "https://docs.nscale.com/docs/inference/serverless-models/current#chat-models"
|
|
},
|
|
"nscale/stabilityai/stable-diffusion-xl-base-1.0": {
|
|
"input_cost_per_pixel": 3e-09,
|
|
"litellm_provider": "nscale",
|
|
"mode": "image_generation",
|
|
"output_cost_per_pixel": 0.0,
|
|
"source": "https://docs.nscale.com/docs/inference/serverless-models/current#image-models",
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"o1": {
|
|
"cache_read_input_token_cost": 7.5e-06,
|
|
"input_cost_per_token": 1.5e-05,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 100000,
|
|
"max_tokens": 100000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"o1-2024-12-17": {
|
|
"cache_read_input_token_cost": 7.5e-06,
|
|
"input_cost_per_token": 1.5e-05,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 100000,
|
|
"max_tokens": 100000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"o1-mini": {
|
|
"cache_read_input_token_cost": 5.5e-07,
|
|
"input_cost_per_token": 1.1e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 65536,
|
|
"max_tokens": 65536,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4.4e-06,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_vision": true
|
|
},
|
|
"o1-mini-2024-09-12": {
|
|
"deprecation_date": "2025-10-27",
|
|
"cache_read_input_token_cost": 1.5e-06,
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 65536,
|
|
"max_tokens": 65536,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.2e-05,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_vision": true
|
|
},
|
|
"o1-preview": {
|
|
"cache_read_input_token_cost": 7.5e-06,
|
|
"input_cost_per_token": 1.5e-05,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-05,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_vision": true
|
|
},
|
|
"o1-preview-2024-09-12": {
|
|
"cache_read_input_token_cost": 7.5e-06,
|
|
"input_cost_per_token": 1.5e-05,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-05,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_vision": true
|
|
},
|
|
"o1-pro": {
|
|
"input_cost_per_token": 0.00015,
|
|
"input_cost_per_token_batches": 7.5e-05,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 100000,
|
|
"max_tokens": 100000,
|
|
"mode": "responses",
|
|
"output_cost_per_token": 0.0006,
|
|
"output_cost_per_token_batches": 0.0003,
|
|
"supported_endpoints": [
|
|
"/v1/responses",
|
|
"/v1/batch"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": false,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"o1-pro-2025-03-19": {
|
|
"input_cost_per_token": 0.00015,
|
|
"input_cost_per_token_batches": 7.5e-05,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 100000,
|
|
"max_tokens": 100000,
|
|
"mode": "responses",
|
|
"output_cost_per_token": 0.0006,
|
|
"output_cost_per_token_batches": 0.0003,
|
|
"supported_endpoints": [
|
|
"/v1/responses",
|
|
"/v1/batch"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": false,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"o3": {
|
|
"cache_read_input_token_cost": 5e-07,
|
|
"cache_read_input_token_cost_flex": 2.5e-07,
|
|
"cache_read_input_token_cost_priority": 8.75e-07,
|
|
"input_cost_per_token": 2e-06,
|
|
"input_cost_per_token_flex": 1e-06,
|
|
"input_cost_per_token_priority": 3.5e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 100000,
|
|
"max_tokens": 100000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8e-06,
|
|
"output_cost_per_token_flex": 4e-06,
|
|
"output_cost_per_token_priority": 1.4e-05,
|
|
"supported_endpoints": [
|
|
"/v1/responses",
|
|
"/v1/chat/completions",
|
|
"/v1/completions",
|
|
"/v1/batch"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": false,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_service_tier": true,
|
|
"supports_vision": true
|
|
},
|
|
"o3-2025-04-16": {
|
|
"cache_read_input_token_cost": 5e-07,
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 100000,
|
|
"max_tokens": 100000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8e-06,
|
|
"supported_endpoints": [
|
|
"/v1/responses",
|
|
"/v1/chat/completions",
|
|
"/v1/completions",
|
|
"/v1/batch"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": false,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_service_tier": true,
|
|
"supports_vision": true
|
|
},
|
|
"o3-deep-research": {
|
|
"cache_read_input_token_cost": 2.5e-06,
|
|
"input_cost_per_token": 1e-05,
|
|
"input_cost_per_token_batches": 5e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 100000,
|
|
"max_tokens": 100000,
|
|
"mode": "responses",
|
|
"output_cost_per_token": 4e-05,
|
|
"output_cost_per_token_batches": 2e-05,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"o3-deep-research-2025-06-26": {
|
|
"cache_read_input_token_cost": 2.5e-06,
|
|
"input_cost_per_token": 1e-05,
|
|
"input_cost_per_token_batches": 5e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 100000,
|
|
"max_tokens": 100000,
|
|
"mode": "responses",
|
|
"output_cost_per_token": 4e-05,
|
|
"output_cost_per_token_batches": 2e-05,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"o3-mini": {
|
|
"cache_read_input_token_cost": 5.5e-07,
|
|
"input_cost_per_token": 1.1e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 100000,
|
|
"max_tokens": 100000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4.4e-06,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": false,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": false
|
|
},
|
|
"o3-mini-2025-01-31": {
|
|
"cache_read_input_token_cost": 5.5e-07,
|
|
"input_cost_per_token": 1.1e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 100000,
|
|
"max_tokens": 100000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4.4e-06,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": false,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": false
|
|
},
|
|
"o3-pro": {
|
|
"input_cost_per_token": 2e-05,
|
|
"input_cost_per_token_batches": 1e-05,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 100000,
|
|
"max_tokens": 100000,
|
|
"mode": "responses",
|
|
"output_cost_per_token": 8e-05,
|
|
"output_cost_per_token_batches": 4e-05,
|
|
"supported_endpoints": [
|
|
"/v1/responses",
|
|
"/v1/batch"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": false,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"o3-pro-2025-06-10": {
|
|
"input_cost_per_token": 2e-05,
|
|
"input_cost_per_token_batches": 1e-05,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 100000,
|
|
"max_tokens": 100000,
|
|
"mode": "responses",
|
|
"output_cost_per_token": 8e-05,
|
|
"output_cost_per_token_batches": 4e-05,
|
|
"supported_endpoints": [
|
|
"/v1/responses",
|
|
"/v1/batch"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": false,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"o4-mini": {
|
|
"cache_read_input_token_cost": 2.75e-07,
|
|
"cache_read_input_token_cost_flex": 1.375e-07,
|
|
"cache_read_input_token_cost_priority": 5e-07,
|
|
"input_cost_per_token": 1.1e-06,
|
|
"input_cost_per_token_flex": 5.5e-07,
|
|
"input_cost_per_token_priority": 2e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 100000,
|
|
"max_tokens": 100000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4.4e-06,
|
|
"output_cost_per_token_flex": 2.2e-06,
|
|
"output_cost_per_token_priority": 8e-06,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": false,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_service_tier": true,
|
|
"supports_vision": true
|
|
},
|
|
"o4-mini-2025-04-16": {
|
|
"cache_read_input_token_cost": 2.75e-07,
|
|
"input_cost_per_token": 1.1e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 100000,
|
|
"max_tokens": 100000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4.4e-06,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": false,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_service_tier": true,
|
|
"supports_vision": true
|
|
},
|
|
"o4-mini-deep-research": {
|
|
"cache_read_input_token_cost": 5e-07,
|
|
"input_cost_per_token": 2e-06,
|
|
"input_cost_per_token_batches": 1e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 100000,
|
|
"max_tokens": 100000,
|
|
"mode": "responses",
|
|
"output_cost_per_token": 8e-06,
|
|
"output_cost_per_token_batches": 4e-06,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"o4-mini-deep-research-2025-06-26": {
|
|
"cache_read_input_token_cost": 5e-07,
|
|
"input_cost_per_token": 2e-06,
|
|
"input_cost_per_token_batches": 1e-06,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 100000,
|
|
"max_tokens": 100000,
|
|
"mode": "responses",
|
|
"output_cost_per_token": 8e-06,
|
|
"output_cost_per_token_batches": 4e-06,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/batch",
|
|
"/v1/responses"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_native_streaming": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"oci/meta.llama-3.1-405b-instruct": {
|
|
"input_cost_per_token": 1.068e-05,
|
|
"litellm_provider": "oci",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.068e-05,
|
|
"source": "https://www.oracle.com/artificial-intelligence/generative-ai/generative-ai-service/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": false
|
|
},
|
|
"oci/meta.llama-3.2-90b-vision-instruct": {
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "oci",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-06,
|
|
"source": "https://www.oracle.com/artificial-intelligence/generative-ai/generative-ai-service/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": false
|
|
},
|
|
"oci/meta.llama-3.3-70b-instruct": {
|
|
"input_cost_per_token": 7.2e-07,
|
|
"litellm_provider": "oci",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.2e-07,
|
|
"source": "https://www.oracle.com/artificial-intelligence/generative-ai/generative-ai-service/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": false
|
|
},
|
|
"oci/meta.llama-4-maverick-17b-128e-instruct-fp8": {
|
|
"input_cost_per_token": 7.2e-07,
|
|
"litellm_provider": "oci",
|
|
"max_input_tokens": 512000,
|
|
"max_output_tokens": 4000,
|
|
"max_tokens": 512000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.2e-07,
|
|
"source": "https://www.oracle.com/artificial-intelligence/generative-ai/generative-ai-service/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": false
|
|
},
|
|
"oci/meta.llama-4-scout-17b-16e-instruct": {
|
|
"input_cost_per_token": 7.2e-07,
|
|
"litellm_provider": "oci",
|
|
"max_input_tokens": 192000,
|
|
"max_output_tokens": 4000,
|
|
"max_tokens": 192000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.2e-07,
|
|
"source": "https://www.oracle.com/artificial-intelligence/generative-ai/generative-ai-service/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": false
|
|
},
|
|
"oci/xai.grok-3": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "oci",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-07,
|
|
"source": "https://www.oracle.com/artificial-intelligence/generative-ai/generative-ai-service/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": false
|
|
},
|
|
"oci/xai.grok-3-fast": {
|
|
"input_cost_per_token": 5e-06,
|
|
"litellm_provider": "oci",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.5e-05,
|
|
"source": "https://www.oracle.com/artificial-intelligence/generative-ai/generative-ai-service/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": false
|
|
},
|
|
"oci/xai.grok-3-mini": {
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "oci",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5e-07,
|
|
"source": "https://www.oracle.com/artificial-intelligence/generative-ai/generative-ai-service/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": false
|
|
},
|
|
"oci/xai.grok-3-mini-fast": {
|
|
"input_cost_per_token": 6e-07,
|
|
"litellm_provider": "oci",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-06,
|
|
"source": "https://www.oracle.com/artificial-intelligence/generative-ai/generative-ai-service/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": false
|
|
},
|
|
"oci/xai.grok-4": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "oci",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-07,
|
|
"source": "https://www.oracle.com/artificial-intelligence/generative-ai/generative-ai-service/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": false
|
|
},
|
|
"oci/cohere.command-latest": {
|
|
"input_cost_per_token": 1.56e-06,
|
|
"litellm_provider": "oci",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.56e-06,
|
|
"source": "https://www.oracle.com/cloud/ai/generative-ai/pricing/",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": false
|
|
},
|
|
"oci/cohere.command-a-03-2025": {
|
|
"input_cost_per_token": 1.56e-06,
|
|
"litellm_provider": "oci",
|
|
"max_input_tokens": 256000,
|
|
"max_output_tokens": 4000,
|
|
"max_tokens": 256000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.56e-06,
|
|
"source": "https://www.oracle.com/cloud/ai/generative-ai/pricing/",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": false
|
|
},
|
|
"oci/cohere.command-plus-latest": {
|
|
"input_cost_per_token": 1.56e-06,
|
|
"litellm_provider": "oci",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.56e-06,
|
|
"source": "https://www.oracle.com/cloud/ai/generative-ai/pricing/",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": false
|
|
},
|
|
"ollama/codegeex4": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "ollama",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0,
|
|
"supports_function_calling": false
|
|
},
|
|
"ollama/codegemma": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "ollama",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "completion",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"ollama/codellama": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "ollama",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "completion",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"ollama/deepseek-coder-v2-base": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "ollama",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "completion",
|
|
"output_cost_per_token": 0.0,
|
|
"supports_function_calling": true
|
|
},
|
|
"ollama/deepseek-coder-v2-instruct": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "ollama",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0,
|
|
"supports_function_calling": true
|
|
},
|
|
"ollama/deepseek-coder-v2-lite-base": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "ollama",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "completion",
|
|
"output_cost_per_token": 0.0,
|
|
"supports_function_calling": true
|
|
},
|
|
"ollama/deepseek-coder-v2-lite-instruct": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "ollama",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0,
|
|
"supports_function_calling": true
|
|
},
|
|
"ollama/deepseek-v3.1:671b-cloud" : {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "ollama",
|
|
"max_input_tokens": 163840,
|
|
"max_output_tokens": 163840,
|
|
"max_tokens": 163840,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0,
|
|
"supports_function_calling": true
|
|
},
|
|
"ollama/gpt-oss:120b-cloud" : {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "ollama",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0,
|
|
"supports_function_calling": true
|
|
},
|
|
"ollama/gpt-oss:20b-cloud" : {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "ollama",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0,
|
|
"supports_function_calling": true
|
|
},
|
|
"ollama/internlm2_5-20b-chat": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "ollama",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0,
|
|
"supports_function_calling": true
|
|
},
|
|
"ollama/llama2": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "ollama",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"ollama/llama2-uncensored": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "ollama",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "completion",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"ollama/llama2:13b": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "ollama",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"ollama/llama2:70b": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "ollama",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"ollama/llama2:7b": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "ollama",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"ollama/llama3": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "ollama",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"ollama/llama3.1": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "ollama",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0,
|
|
"supports_function_calling": true
|
|
},
|
|
"ollama/llama3:70b": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "ollama",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"ollama/llama3:8b": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "ollama",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"ollama/mistral": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "ollama",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "completion",
|
|
"output_cost_per_token": 0.0,
|
|
"supports_function_calling": true
|
|
},
|
|
"ollama/mistral-7B-Instruct-v0.1": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "ollama",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0,
|
|
"supports_function_calling": true
|
|
},
|
|
"ollama/mistral-7B-Instruct-v0.2": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "ollama",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0,
|
|
"supports_function_calling": true
|
|
},
|
|
"ollama/mistral-large-instruct-2407": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "ollama",
|
|
"max_input_tokens": 65536,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 65536,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0,
|
|
"supports_function_calling": true
|
|
},
|
|
"ollama/mixtral-8x22B-Instruct-v0.1": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "ollama",
|
|
"max_input_tokens": 65536,
|
|
"max_output_tokens": 65536,
|
|
"max_tokens": 65536,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0,
|
|
"supports_function_calling": true
|
|
},
|
|
"ollama/mixtral-8x7B-Instruct-v0.1": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "ollama",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0,
|
|
"supports_function_calling": true
|
|
},
|
|
"ollama/orca-mini": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "ollama",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "completion",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"ollama/qwen3-coder:480b-cloud": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "ollama",
|
|
"max_input_tokens": 262144,
|
|
"max_output_tokens": 262144,
|
|
"max_tokens": 262144,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0,
|
|
"supports_function_calling": true
|
|
},
|
|
"ollama/vicuna": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "ollama",
|
|
"max_input_tokens": 2048,
|
|
"max_output_tokens": 2048,
|
|
"max_tokens": 2048,
|
|
"mode": "completion",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"omni-moderation-2024-09-26": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 0,
|
|
"max_tokens": 32768,
|
|
"mode": "moderation",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"omni-moderation-latest": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 0,
|
|
"max_tokens": 32768,
|
|
"mode": "moderation",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"omni-moderation-latest-intents": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 0,
|
|
"max_tokens": 32768,
|
|
"mode": "moderation",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"openai.gpt-oss-120b-1:0": {
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-07,
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openai.gpt-oss-20b-1:0": {
|
|
"input_cost_per_token": 7e-08,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-07,
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/anthropic/claude-2": {
|
|
"input_cost_per_token": 1.102e-05,
|
|
"litellm_provider": "openrouter",
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 100000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3.268e-05,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/anthropic/claude-3-5-haiku": {
|
|
"input_cost_per_token": 1e-06,
|
|
"litellm_provider": "openrouter",
|
|
"max_tokens": 200000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5e-06,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/anthropic/claude-3-5-haiku-20241022": {
|
|
"input_cost_per_token": 1e-06,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5e-06,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"tool_use_system_prompt_tokens": 264
|
|
},
|
|
"openrouter/anthropic/claude-3-haiku": {
|
|
"input_cost_per_image": 0.0004,
|
|
"input_cost_per_token": 2.5e-07,
|
|
"litellm_provider": "openrouter",
|
|
"max_tokens": 200000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.25e-06,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"openrouter/anthropic/claude-3-haiku-20240307": {
|
|
"input_cost_per_token": 2.5e-07,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.25e-06,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 264
|
|
},
|
|
"openrouter/anthropic/claude-3-opus": {
|
|
"input_cost_per_token": 1.5e-05,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.5e-05,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 395
|
|
},
|
|
"openrouter/anthropic/claude-3-sonnet": {
|
|
"input_cost_per_image": 0.0048,
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "openrouter",
|
|
"max_tokens": 200000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"openrouter/anthropic/claude-3.5-sonnet": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 159
|
|
},
|
|
"openrouter/anthropic/claude-3.5-sonnet:beta": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 159
|
|
},
|
|
"openrouter/anthropic/claude-3.7-sonnet": {
|
|
"input_cost_per_image": 0.0048,
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 159
|
|
},
|
|
"openrouter/anthropic/claude-3.7-sonnet:beta": {
|
|
"input_cost_per_image": 0.0048,
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 159
|
|
},
|
|
"openrouter/anthropic/claude-instant-v1": {
|
|
"input_cost_per_token": 1.63e-06,
|
|
"litellm_provider": "openrouter",
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 100000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5.51e-06,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/anthropic/claude-opus-4": {
|
|
"input_cost_per_image": 0.0048,
|
|
"cache_creation_input_token_cost": 1.875e-05,
|
|
"cache_read_input_token_cost": 1.5e-06,
|
|
"input_cost_per_token": 1.5e-05,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 32000,
|
|
"max_tokens": 32000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.5e-05,
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 159
|
|
},
|
|
"openrouter/anthropic/claude-opus-4.1": {
|
|
"input_cost_per_image": 0.0048,
|
|
"cache_creation_input_token_cost": 1.875e-05,
|
|
"cache_creation_input_token_cost_above_1hr": 3e-05,
|
|
"cache_read_input_token_cost": 1.5e-06,
|
|
"input_cost_per_token": 1.5e-05,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 32000,
|
|
"max_tokens": 32000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.5e-05,
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 159
|
|
},
|
|
"openrouter/anthropic/claude-sonnet-4": {
|
|
"input_cost_per_image": 0.0048,
|
|
"cache_creation_input_token_cost": 3.75e-06,
|
|
"cache_creation_input_token_cost_above_200k_tokens": 7.5e-06,
|
|
"cache_read_input_token_cost": 3e-07,
|
|
"cache_read_input_token_cost_above_200k_tokens": 6e-07,
|
|
"input_cost_per_token": 3e-06,
|
|
"input_cost_per_token_above_200k_tokens": 6e-06,
|
|
"output_cost_per_token_above_200k_tokens": 2.25e-05,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 1000000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 64000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 159
|
|
},
|
|
"openrouter/anthropic/claude-opus-4.5": {
|
|
"cache_creation_input_token_cost": 6.25e-06,
|
|
"cache_read_input_token_cost": 5e-07,
|
|
"input_cost_per_token": 5e-06,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 32000,
|
|
"max_tokens": 32000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.5e-05,
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 159
|
|
},
|
|
"openrouter/anthropic/claude-sonnet-4.5": {
|
|
"input_cost_per_image": 0.0048,
|
|
"cache_creation_input_token_cost": 3.75e-06,
|
|
"cache_read_input_token_cost": 3e-07,
|
|
"input_cost_per_token": 3e-06,
|
|
"input_cost_per_token_above_200k_tokens": 6e-06,
|
|
"output_cost_per_token_above_200k_tokens": 2.25e-05,
|
|
"cache_creation_input_token_cost_above_200k_tokens": 7.5e-06,
|
|
"cache_read_input_token_cost_above_200k_tokens": 6e-07,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 1000000,
|
|
"max_output_tokens": 1000000,
|
|
"max_tokens": 1000000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 159
|
|
},
|
|
"openrouter/anthropic/claude-haiku-4.5": {
|
|
"cache_creation_input_token_cost": 1.25e-06,
|
|
"cache_read_input_token_cost": 1e-07,
|
|
"input_cost_per_token": 1e-06,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 200000,
|
|
"max_tokens": 200000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5e-06,
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 346
|
|
},
|
|
"openrouter/bytedance/ui-tars-1.5-7b": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 2048,
|
|
"max_tokens": 2048,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-07,
|
|
"source": "https://openrouter.ai/api/v1/models/bytedance/ui-tars-1.5-7b",
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/cognitivecomputations/dolphin-mixtral-8x7b": {
|
|
"input_cost_per_token": 5e-07,
|
|
"litellm_provider": "openrouter",
|
|
"max_tokens": 32769,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5e-07,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/cohere/command-r-plus": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "openrouter",
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/databricks/dbrx-instruct": {
|
|
"input_cost_per_token": 6e-07,
|
|
"litellm_provider": "openrouter",
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-07,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/deepseek/deepseek-chat": {
|
|
"input_cost_per_token": 1.4e-07,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 65536,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.8e-07,
|
|
"supports_prompt_caching": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/deepseek/deepseek-chat-v3-0324": {
|
|
"input_cost_per_token": 1.4e-07,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 65536,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.8e-07,
|
|
"supports_prompt_caching": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/deepseek/deepseek-chat-v3.1": {
|
|
"input_cost_per_token": 2e-07,
|
|
"input_cost_per_token_cache_hit": 2e-08,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 163840,
|
|
"max_output_tokens": 163840,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8e-07,
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/deepseek/deepseek-v3.2": {
|
|
"input_cost_per_token": 2.8e-07,
|
|
"input_cost_per_token_cache_hit": 2.8e-08,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 163840,
|
|
"max_output_tokens": 163840,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-07,
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/deepseek/deepseek-v3.2-exp": {
|
|
"input_cost_per_token": 2e-07,
|
|
"input_cost_per_token_cache_hit": 2e-08,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 163840,
|
|
"max_output_tokens": 163840,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-07,
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": false,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/deepseek/deepseek-coder": {
|
|
"input_cost_per_token": 1.4e-07,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 66000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.8e-07,
|
|
"supports_prompt_caching": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/deepseek/deepseek-r1": {
|
|
"input_cost_per_token": 5.5e-07,
|
|
"input_cost_per_token_cache_hit": 1.4e-07,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 65336,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.19e-06,
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/deepseek/deepseek-r1-0528": {
|
|
"input_cost_per_token": 5e-07,
|
|
"input_cost_per_token_cache_hit": 1.4e-07,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 65336,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.15e-06,
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/fireworks/firellava-13b": {
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "openrouter",
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-07,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/google/gemini-2.0-flash-001": {
|
|
"input_cost_per_audio_token": 7e-07,
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "openrouter",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 8192,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 8192,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-07,
|
|
"supports_audio_output": true,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"openrouter/google/gemini-2.5-flash": {
|
|
"input_cost_per_audio_token": 7e-07,
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "openrouter",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 8192,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 8192,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.5e-06,
|
|
"supports_audio_output": true,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"openrouter/google/gemini-2.5-pro": {
|
|
"input_cost_per_audio_token": 7e-07,
|
|
"input_cost_per_token": 1.25e-06,
|
|
"litellm_provider": "openrouter",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 8192,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 8192,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"supports_audio_output": true,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"openrouter/google/gemini-3-pro-preview": {
|
|
"cache_read_input_token_cost": 2e-07,
|
|
"cache_read_input_token_cost_above_200k_tokens": 4e-07,
|
|
"cache_creation_input_token_cost_above_200k_tokens": 2.5e-07,
|
|
"input_cost_per_token": 2e-06,
|
|
"input_cost_per_token_above_200k_tokens": 4e-06,
|
|
"input_cost_per_token_batches": 1e-06,
|
|
"litellm_provider": "openrouter",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 65535,
|
|
"max_pdf_size_mb": 30,
|
|
"max_tokens": 65535,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.2e-05,
|
|
"output_cost_per_token_above_200k_tokens": 1.8e-05,
|
|
"output_cost_per_token_batches": 6e-06,
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/completions",
|
|
"/v1/batch"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_audio_input": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_video_input": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true
|
|
},
|
|
"openrouter/google/gemini-pro-1.5": {
|
|
"input_cost_per_image": 0.00265,
|
|
"input_cost_per_token": 2.5e-06,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 1000000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.5e-06,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"openrouter/google/gemini-pro-vision": {
|
|
"input_cost_per_image": 0.0025,
|
|
"input_cost_per_token": 1.25e-07,
|
|
"litellm_provider": "openrouter",
|
|
"max_tokens": 45875,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3.75e-07,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"openrouter/google/palm-2-chat-bison": {
|
|
"input_cost_per_token": 5e-07,
|
|
"litellm_provider": "openrouter",
|
|
"max_tokens": 25804,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5e-07,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/google/palm-2-codechat-bison": {
|
|
"input_cost_per_token": 5e-07,
|
|
"litellm_provider": "openrouter",
|
|
"max_tokens": 20070,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5e-07,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/gryphe/mythomax-l2-13b": {
|
|
"input_cost_per_token": 1.875e-06,
|
|
"litellm_provider": "openrouter",
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.875e-06,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/jondurbin/airoboros-l2-70b-2.1": {
|
|
"input_cost_per_token": 1.3875e-05,
|
|
"litellm_provider": "openrouter",
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.3875e-05,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/mancer/weaver": {
|
|
"input_cost_per_token": 5.625e-06,
|
|
"litellm_provider": "openrouter",
|
|
"max_tokens": 8000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5.625e-06,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/meta-llama/codellama-34b-instruct": {
|
|
"input_cost_per_token": 5e-07,
|
|
"litellm_provider": "openrouter",
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5e-07,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/meta-llama/llama-2-13b-chat": {
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "openrouter",
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-07,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/meta-llama/llama-2-70b-chat": {
|
|
"input_cost_per_token": 1.5e-06,
|
|
"litellm_provider": "openrouter",
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-06,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/meta-llama/llama-3-70b-instruct": {
|
|
"input_cost_per_token": 5.9e-07,
|
|
"litellm_provider": "openrouter",
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.9e-07,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/meta-llama/llama-3-70b-instruct:nitro": {
|
|
"input_cost_per_token": 9e-07,
|
|
"litellm_provider": "openrouter",
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 9e-07,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/meta-llama/llama-3-8b-instruct:extended": {
|
|
"input_cost_per_token": 2.25e-07,
|
|
"litellm_provider": "openrouter",
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.25e-06,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/meta-llama/llama-3-8b-instruct:free": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "openrouter",
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/microsoft/wizardlm-2-8x22b:nitro": {
|
|
"input_cost_per_token": 1e-06,
|
|
"litellm_provider": "openrouter",
|
|
"max_tokens": 65536,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-06,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/minimax/minimax-m2": {
|
|
"input_cost_per_token": 2.55e-7,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 204800,
|
|
"max_output_tokens": 204800,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.02e-6,
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": false,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/mistralai/mistral-7b-instruct": {
|
|
"input_cost_per_token": 1.3e-07,
|
|
"litellm_provider": "openrouter",
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.3e-07,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/mistralai/mistral-7b-instruct:free": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "openrouter",
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/mistralai/mistral-large": {
|
|
"input_cost_per_token": 8e-06,
|
|
"litellm_provider": "openrouter",
|
|
"max_tokens": 32000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.4e-05,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/mistralai/mistral-small-3.1-24b-instruct": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "openrouter",
|
|
"max_tokens": 32000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-07,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/mistralai/mistral-small-3.2-24b-instruct": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "openrouter",
|
|
"max_tokens": 32000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-07,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/mistralai/mixtral-8x22b-instruct": {
|
|
"input_cost_per_token": 6.5e-07,
|
|
"litellm_provider": "openrouter",
|
|
"max_tokens": 65536,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6.5e-07,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/nousresearch/nous-hermes-llama2-13b": {
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "openrouter",
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-07,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/openai/gpt-3.5-turbo": {
|
|
"input_cost_per_token": 1.5e-06,
|
|
"litellm_provider": "openrouter",
|
|
"max_tokens": 4095,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-06,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/openai/gpt-3.5-turbo-16k": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "openrouter",
|
|
"max_tokens": 16383,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-06,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/openai/gpt-4": {
|
|
"input_cost_per_token": 3e-05,
|
|
"litellm_provider": "openrouter",
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-05,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/openai/gpt-4-vision-preview": {
|
|
"input_cost_per_image": 0.01445,
|
|
"input_cost_per_token": 1e-05,
|
|
"litellm_provider": "openrouter",
|
|
"max_tokens": 130000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-05,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"openrouter/openai/gpt-4.1": {
|
|
"cache_read_input_token_cost": 5e-07,
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 1047576,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8e-06,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"openrouter/openai/gpt-4.1-2025-04-14": {
|
|
"cache_read_input_token_cost": 5e-07,
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 1047576,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8e-06,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"openrouter/openai/gpt-4.1-mini": {
|
|
"cache_read_input_token_cost": 1e-07,
|
|
"input_cost_per_token": 4e-07,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 1047576,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.6e-06,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"openrouter/openai/gpt-4.1-mini-2025-04-14": {
|
|
"cache_read_input_token_cost": 1e-07,
|
|
"input_cost_per_token": 4e-07,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 1047576,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.6e-06,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"openrouter/openai/gpt-4.1-nano": {
|
|
"cache_read_input_token_cost": 2.5e-08,
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 1047576,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"openrouter/openai/gpt-4.1-nano-2025-04-14": {
|
|
"cache_read_input_token_cost": 2.5e-08,
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 1047576,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"openrouter/openai/gpt-4o": {
|
|
"input_cost_per_token": 2.5e-06,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"openrouter/openai/gpt-4o-2024-05-13": {
|
|
"input_cost_per_token": 5e-06,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"openrouter/openai/gpt-5-chat": {
|
|
"cache_read_input_token_cost": 1.25e-07,
|
|
"input_cost_per_token": 1.25e-06,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/openai/gpt-5-codex": {
|
|
"cache_read_input_token_cost": 1.25e-07,
|
|
"input_cost_per_token": 1.25e-06,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/openai/gpt-5": {
|
|
"cache_read_input_token_cost": 1.25e-07,
|
|
"input_cost_per_token": 1.25e-06,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/openai/gpt-5-mini": {
|
|
"cache_read_input_token_cost": 2.5e-08,
|
|
"input_cost_per_token": 2.5e-07,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-06,
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/openai/gpt-5-nano": {
|
|
"cache_read_input_token_cost": 5e-09,
|
|
"input_cost_per_token": 5e-08,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 272000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-07,
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text"
|
|
],
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/openai/gpt-oss-120b": {
|
|
"input_cost_per_token": 1.8e-07,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8e-07,
|
|
"source": "https://openrouter.ai/openai/gpt-oss-120b",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/openai/gpt-oss-20b": {
|
|
"input_cost_per_token": 1.8e-07,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8e-07,
|
|
"source": "https://openrouter.ai/openai/gpt-oss-20b",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/openai/o1": {
|
|
"cache_read_input_token_cost": 7.5e-06,
|
|
"input_cost_per_token": 1.5e-05,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 100000,
|
|
"max_tokens": 100000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"openrouter/openai/o1-mini": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 65536,
|
|
"max_tokens": 65536,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.2e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": false
|
|
},
|
|
"openrouter/openai/o1-mini-2024-09-12": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 65536,
|
|
"max_tokens": 65536,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.2e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": false
|
|
},
|
|
"openrouter/openai/o1-preview": {
|
|
"input_cost_per_token": 1.5e-05,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": false
|
|
},
|
|
"openrouter/openai/o1-preview-2024-09-12": {
|
|
"input_cost_per_token": 1.5e-05,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": false
|
|
},
|
|
"openrouter/openai/o3-mini": {
|
|
"input_cost_per_token": 1.1e-06,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 65536,
|
|
"max_tokens": 65536,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4.4e-06,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": false
|
|
},
|
|
"openrouter/openai/o3-mini-high": {
|
|
"input_cost_per_token": 1.1e-06,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 65536,
|
|
"max_tokens": 65536,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4.4e-06,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": false
|
|
},
|
|
"openrouter/pygmalionai/mythalion-13b": {
|
|
"input_cost_per_token": 1.875e-06,
|
|
"litellm_provider": "openrouter",
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.875e-06,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/qwen/qwen-2.5-coder-32b-instruct": {
|
|
"input_cost_per_token": 1.8e-07,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 33792,
|
|
"max_output_tokens": 33792,
|
|
"max_tokens": 33792,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.8e-07,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/qwen/qwen-vl-plus": {
|
|
"input_cost_per_token": 2.1e-07,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 2048,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6.3e-07,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"openrouter/qwen/qwen3-coder": {
|
|
"input_cost_per_token": 2.2e-7,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 262100,
|
|
"max_output_tokens": 262100,
|
|
"max_tokens": 262100,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 9.5e-7,
|
|
"source": "https://openrouter.ai/qwen/qwen3-coder",
|
|
"supports_tool_choice": true,
|
|
"supports_function_calling": true
|
|
},
|
|
"openrouter/switchpoint/router": {
|
|
"input_cost_per_token": 8.5e-07,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3.4e-06,
|
|
"source": "https://openrouter.ai/switchpoint/router",
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/undi95/remm-slerp-l2-13b": {
|
|
"input_cost_per_token": 1.875e-06,
|
|
"litellm_provider": "openrouter",
|
|
"max_tokens": 6144,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.875e-06,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/x-ai/grok-4": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 256000,
|
|
"max_output_tokens": 256000,
|
|
"max_tokens": 256000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"source": "https://openrouter.ai/x-ai/grok-4",
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true,
|
|
"supports_web_search": true
|
|
},
|
|
"openrouter/x-ai/grok-4-fast:free": {
|
|
"input_cost_per_token": 0,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 2000000,
|
|
"max_output_tokens": 30000,
|
|
"max_tokens": 2000000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0,
|
|
"source": "https://openrouter.ai/x-ai/grok-4-fast:free",
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true,
|
|
"supports_web_search": false
|
|
},
|
|
"openrouter/z-ai/glm-4.6": {
|
|
"input_cost_per_token": 4.0e-7,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 202800,
|
|
"max_output_tokens": 131000,
|
|
"max_tokens": 202800,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.75e-6,
|
|
"source": "https://openrouter.ai/z-ai/glm-4.6",
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"openrouter/z-ai/glm-4.6:exacto": {
|
|
"input_cost_per_token": 4.5e-7,
|
|
"litellm_provider": "openrouter",
|
|
"max_input_tokens": 202800,
|
|
"max_output_tokens": 131000,
|
|
"max_tokens": 202800,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.9e-6,
|
|
"source": "https://openrouter.ai/z-ai/glm-4.6:exacto",
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"ovhcloud/DeepSeek-R1-Distill-Llama-70B": {
|
|
"input_cost_per_token": 6.7e-07,
|
|
"litellm_provider": "ovhcloud",
|
|
"max_input_tokens": 131000,
|
|
"max_output_tokens": 131000,
|
|
"max_tokens": 131000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6.7e-07,
|
|
"source": "https://endpoints.ai.cloud.ovh.net/models/deepseek-r1-distill-llama-70b",
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"ovhcloud/Llama-3.1-8B-Instruct": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "ovhcloud",
|
|
"max_input_tokens": 131000,
|
|
"max_output_tokens": 131000,
|
|
"max_tokens": 131000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-07,
|
|
"source": "https://endpoints.ai.cloud.ovh.net/models/llama-3-1-8b-instruct",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"ovhcloud/Meta-Llama-3_1-70B-Instruct": {
|
|
"input_cost_per_token": 6.7e-07,
|
|
"litellm_provider": "ovhcloud",
|
|
"max_input_tokens": 131000,
|
|
"max_output_tokens": 131000,
|
|
"max_tokens": 131000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6.7e-07,
|
|
"source": "https://endpoints.ai.cloud.ovh.net/models/meta-llama-3-1-70b-instruct",
|
|
"supports_function_calling": false,
|
|
"supports_response_schema": false,
|
|
"supports_tool_choice": false
|
|
},
|
|
"ovhcloud/Meta-Llama-3_3-70B-Instruct": {
|
|
"input_cost_per_token": 6.7e-07,
|
|
"litellm_provider": "ovhcloud",
|
|
"max_input_tokens": 131000,
|
|
"max_output_tokens": 131000,
|
|
"max_tokens": 131000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6.7e-07,
|
|
"source": "https://endpoints.ai.cloud.ovh.net/models/meta-llama-3-3-70b-instruct",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"ovhcloud/Mistral-7B-Instruct-v0.3": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "ovhcloud",
|
|
"max_input_tokens": 127000,
|
|
"max_output_tokens": 127000,
|
|
"max_tokens": 127000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-07,
|
|
"source": "https://endpoints.ai.cloud.ovh.net/models/mistral-7b-instruct-v0-3",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"ovhcloud/Mistral-Nemo-Instruct-2407": {
|
|
"input_cost_per_token": 1.3e-07,
|
|
"litellm_provider": "ovhcloud",
|
|
"max_input_tokens": 118000,
|
|
"max_output_tokens": 118000,
|
|
"max_tokens": 118000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.3e-07,
|
|
"source": "https://endpoints.ai.cloud.ovh.net/models/mistral-nemo-instruct-2407",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"ovhcloud/Mistral-Small-3.2-24B-Instruct-2506": {
|
|
"input_cost_per_token": 9e-08,
|
|
"litellm_provider": "ovhcloud",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.8e-07,
|
|
"source": "https://endpoints.ai.cloud.ovh.net/models/mistral-small-3-2-24b-instruct-2506",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"ovhcloud/Mixtral-8x7B-Instruct-v0.1": {
|
|
"input_cost_per_token": 6.3e-07,
|
|
"litellm_provider": "ovhcloud",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 32000,
|
|
"max_tokens": 32000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6.3e-07,
|
|
"source": "https://endpoints.ai.cloud.ovh.net/models/mixtral-8x7b-instruct-v0-1",
|
|
"supports_function_calling": false,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": false
|
|
},
|
|
"ovhcloud/Qwen2.5-Coder-32B-Instruct": {
|
|
"input_cost_per_token": 8.7e-07,
|
|
"litellm_provider": "ovhcloud",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 32000,
|
|
"max_tokens": 32000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8.7e-07,
|
|
"source": "https://endpoints.ai.cloud.ovh.net/models/qwen2-5-coder-32b-instruct",
|
|
"supports_function_calling": false,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": false
|
|
},
|
|
"ovhcloud/Qwen2.5-VL-72B-Instruct": {
|
|
"input_cost_per_token": 9.1e-07,
|
|
"litellm_provider": "ovhcloud",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 32000,
|
|
"max_tokens": 32000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 9.1e-07,
|
|
"source": "https://endpoints.ai.cloud.ovh.net/models/qwen2-5-vl-72b-instruct",
|
|
"supports_function_calling": false,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": false,
|
|
"supports_vision": true
|
|
},
|
|
"ovhcloud/Qwen3-32B": {
|
|
"input_cost_per_token": 8e-08,
|
|
"litellm_provider": "ovhcloud",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 32000,
|
|
"max_tokens": 32000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.3e-07,
|
|
"source": "https://endpoints.ai.cloud.ovh.net/models/qwen3-32b",
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"ovhcloud/gpt-oss-120b": {
|
|
"input_cost_per_token": 8e-08,
|
|
"litellm_provider": "ovhcloud",
|
|
"max_input_tokens": 131000,
|
|
"max_output_tokens": 131000,
|
|
"max_tokens": 131000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-07,
|
|
"source": "https://endpoints.ai.cloud.ovh.net/models/gpt-oss-120b",
|
|
"supports_function_calling": false,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": false
|
|
},
|
|
"ovhcloud/gpt-oss-20b": {
|
|
"input_cost_per_token": 4e-08,
|
|
"litellm_provider": "ovhcloud",
|
|
"max_input_tokens": 131000,
|
|
"max_output_tokens": 131000,
|
|
"max_tokens": 131000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-07,
|
|
"source": "https://endpoints.ai.cloud.ovh.net/models/gpt-oss-20b",
|
|
"supports_function_calling": false,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": false
|
|
},
|
|
"ovhcloud/llava-v1.6-mistral-7b-hf": {
|
|
"input_cost_per_token": 2.9e-07,
|
|
"litellm_provider": "ovhcloud",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 32000,
|
|
"max_tokens": 32000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.9e-07,
|
|
"source": "https://endpoints.ai.cloud.ovh.net/models/llava-next-mistral-7b",
|
|
"supports_function_calling": false,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": false,
|
|
"supports_vision": true
|
|
},
|
|
"ovhcloud/mamba-codestral-7B-v0.1": {
|
|
"input_cost_per_token": 1.9e-07,
|
|
"litellm_provider": "ovhcloud",
|
|
"max_input_tokens": 256000,
|
|
"max_output_tokens": 256000,
|
|
"max_tokens": 256000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.9e-07,
|
|
"source": "https://endpoints.ai.cloud.ovh.net/models/mamba-codestral-7b-v0-1",
|
|
"supports_function_calling": false,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": false
|
|
},
|
|
"palm/chat-bison": {
|
|
"input_cost_per_token": 1.25e-07,
|
|
"litellm_provider": "palm",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.25e-07,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models"
|
|
},
|
|
"palm/chat-bison-001": {
|
|
"input_cost_per_token": 1.25e-07,
|
|
"litellm_provider": "palm",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.25e-07,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models"
|
|
},
|
|
"palm/text-bison": {
|
|
"input_cost_per_token": 1.25e-07,
|
|
"litellm_provider": "palm",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 1024,
|
|
"max_tokens": 1024,
|
|
"mode": "completion",
|
|
"output_cost_per_token": 1.25e-07,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models"
|
|
},
|
|
"palm/text-bison-001": {
|
|
"input_cost_per_token": 1.25e-07,
|
|
"litellm_provider": "palm",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 1024,
|
|
"max_tokens": 1024,
|
|
"mode": "completion",
|
|
"output_cost_per_token": 1.25e-07,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models"
|
|
},
|
|
"palm/text-bison-safety-off": {
|
|
"input_cost_per_token": 1.25e-07,
|
|
"litellm_provider": "palm",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 1024,
|
|
"max_tokens": 1024,
|
|
"mode": "completion",
|
|
"output_cost_per_token": 1.25e-07,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models"
|
|
},
|
|
"palm/text-bison-safety-recitation-off": {
|
|
"input_cost_per_token": 1.25e-07,
|
|
"litellm_provider": "palm",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 1024,
|
|
"max_tokens": 1024,
|
|
"mode": "completion",
|
|
"output_cost_per_token": 1.25e-07,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models"
|
|
},
|
|
"parallel_ai/search": {
|
|
"input_cost_per_query": 0.004,
|
|
"litellm_provider": "parallel_ai",
|
|
"mode": "search"
|
|
},
|
|
"parallel_ai/search-pro": {
|
|
"input_cost_per_query": 0.009,
|
|
"litellm_provider": "parallel_ai",
|
|
"mode": "search"
|
|
},
|
|
"perplexity/codellama-34b-instruct": {
|
|
"input_cost_per_token": 3.5e-07,
|
|
"litellm_provider": "perplexity",
|
|
"max_input_tokens": 16384,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.4e-06
|
|
},
|
|
"perplexity/codellama-70b-instruct": {
|
|
"input_cost_per_token": 7e-07,
|
|
"litellm_provider": "perplexity",
|
|
"max_input_tokens": 16384,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.8e-06
|
|
},
|
|
"perplexity/llama-2-70b-chat": {
|
|
"input_cost_per_token": 7e-07,
|
|
"litellm_provider": "perplexity",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.8e-06
|
|
},
|
|
"perplexity/llama-3.1-70b-instruct": {
|
|
"input_cost_per_token": 1e-06,
|
|
"litellm_provider": "perplexity",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-06
|
|
},
|
|
"perplexity/llama-3.1-8b-instruct": {
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "perplexity",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-07
|
|
},
|
|
"perplexity/llama-3.1-sonar-huge-128k-online": {
|
|
"deprecation_date": "2025-02-22",
|
|
"input_cost_per_token": 5e-06,
|
|
"litellm_provider": "perplexity",
|
|
"max_input_tokens": 127072,
|
|
"max_output_tokens": 127072,
|
|
"max_tokens": 127072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5e-06
|
|
},
|
|
"perplexity/llama-3.1-sonar-large-128k-chat": {
|
|
"deprecation_date": "2025-02-22",
|
|
"input_cost_per_token": 1e-06,
|
|
"litellm_provider": "perplexity",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-06
|
|
},
|
|
"perplexity/llama-3.1-sonar-large-128k-online": {
|
|
"deprecation_date": "2025-02-22",
|
|
"input_cost_per_token": 1e-06,
|
|
"litellm_provider": "perplexity",
|
|
"max_input_tokens": 127072,
|
|
"max_output_tokens": 127072,
|
|
"max_tokens": 127072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-06
|
|
},
|
|
"perplexity/llama-3.1-sonar-small-128k-chat": {
|
|
"deprecation_date": "2025-02-22",
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "perplexity",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-07
|
|
},
|
|
"perplexity/llama-3.1-sonar-small-128k-online": {
|
|
"deprecation_date": "2025-02-22",
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "perplexity",
|
|
"max_input_tokens": 127072,
|
|
"max_output_tokens": 127072,
|
|
"max_tokens": 127072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-07
|
|
},
|
|
"perplexity/mistral-7b-instruct": {
|
|
"input_cost_per_token": 7e-08,
|
|
"litellm_provider": "perplexity",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.8e-07
|
|
},
|
|
"perplexity/mixtral-8x7b-instruct": {
|
|
"input_cost_per_token": 7e-08,
|
|
"litellm_provider": "perplexity",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.8e-07
|
|
},
|
|
"perplexity/pplx-70b-chat": {
|
|
"input_cost_per_token": 7e-07,
|
|
"litellm_provider": "perplexity",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.8e-06
|
|
},
|
|
"perplexity/pplx-70b-online": {
|
|
"input_cost_per_request": 0.005,
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "perplexity",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.8e-06
|
|
},
|
|
"perplexity/pplx-7b-chat": {
|
|
"input_cost_per_token": 7e-08,
|
|
"litellm_provider": "perplexity",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.8e-07
|
|
},
|
|
"perplexity/pplx-7b-online": {
|
|
"input_cost_per_request": 0.005,
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "perplexity",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.8e-07
|
|
},
|
|
"perplexity/sonar": {
|
|
"input_cost_per_token": 1e-06,
|
|
"litellm_provider": "perplexity",
|
|
"max_input_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-06,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.012,
|
|
"search_context_size_low": 0.005,
|
|
"search_context_size_medium": 0.008
|
|
},
|
|
"supports_web_search": true
|
|
},
|
|
"perplexity/sonar-deep-research": {
|
|
"citation_cost_per_token": 2e-06,
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "perplexity",
|
|
"max_input_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_reasoning_token": 3e-06,
|
|
"output_cost_per_token": 8e-06,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.005,
|
|
"search_context_size_low": 0.005,
|
|
"search_context_size_medium": 0.005
|
|
},
|
|
"supports_reasoning": true,
|
|
"supports_web_search": true
|
|
},
|
|
"perplexity/sonar-medium-chat": {
|
|
"input_cost_per_token": 6e-07,
|
|
"litellm_provider": "perplexity",
|
|
"max_input_tokens": 16384,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.8e-06
|
|
},
|
|
"perplexity/sonar-medium-online": {
|
|
"input_cost_per_request": 0.005,
|
|
"input_cost_per_token": 0,
|
|
"litellm_provider": "perplexity",
|
|
"max_input_tokens": 12000,
|
|
"max_output_tokens": 12000,
|
|
"max_tokens": 12000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.8e-06
|
|
},
|
|
"perplexity/sonar-pro": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "perplexity",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 8000,
|
|
"max_tokens": 8000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.014,
|
|
"search_context_size_low": 0.006,
|
|
"search_context_size_medium": 0.01
|
|
},
|
|
"supports_web_search": true
|
|
},
|
|
"perplexity/sonar-reasoning": {
|
|
"input_cost_per_token": 1e-06,
|
|
"litellm_provider": "perplexity",
|
|
"max_input_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5e-06,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.014,
|
|
"search_context_size_low": 0.005,
|
|
"search_context_size_medium": 0.008
|
|
},
|
|
"supports_reasoning": true,
|
|
"supports_web_search": true
|
|
},
|
|
"perplexity/sonar-reasoning-pro": {
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "perplexity",
|
|
"max_input_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8e-06,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.014,
|
|
"search_context_size_low": 0.006,
|
|
"search_context_size_medium": 0.01
|
|
},
|
|
"supports_reasoning": true,
|
|
"supports_web_search": true
|
|
},
|
|
"perplexity/sonar-small-chat": {
|
|
"input_cost_per_token": 7e-08,
|
|
"litellm_provider": "perplexity",
|
|
"max_input_tokens": 16384,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.8e-07
|
|
},
|
|
"perplexity/sonar-small-online": {
|
|
"input_cost_per_request": 0.005,
|
|
"input_cost_per_token": 0,
|
|
"litellm_provider": "perplexity",
|
|
"max_input_tokens": 12000,
|
|
"max_output_tokens": 12000,
|
|
"max_tokens": 12000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.8e-07
|
|
},
|
|
"publicai/swiss-ai/apertus-8b-instruct": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "publicai",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0,
|
|
"source": "https://platform.publicai.co/docs",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"publicai/swiss-ai/apertus-70b-instruct": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "publicai",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0,
|
|
"source": "https://platform.publicai.co/docs",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"publicai/aisingapore/Gemma-SEA-LION-v4-27B-IT": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "publicai",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0,
|
|
"source": "https://platform.publicai.co/docs",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"publicai/BSC-LT/salamandra-7b-instruct-tools-16k": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "publicai",
|
|
"max_input_tokens": 16384,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0,
|
|
"source": "https://platform.publicai.co/docs",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"publicai/BSC-LT/ALIA-40b-instruct_Q8_0": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "publicai",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0,
|
|
"source": "https://platform.publicai.co/docs",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"publicai/allenai/Olmo-3-7B-Instruct": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "publicai",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0,
|
|
"source": "https://platform.publicai.co/docs",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"publicai/aisingapore/Qwen-SEA-LION-v4-32B-IT": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "publicai",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0,
|
|
"source": "https://platform.publicai.co/docs",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"publicai/allenai/Olmo-3-7B-Think": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "publicai",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0,
|
|
"source": "https://platform.publicai.co/docs",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_reasoning": true
|
|
},
|
|
"publicai/allenai/Olmo-3-32B-Think": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "publicai",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0,
|
|
"source": "https://platform.publicai.co/docs",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_reasoning": true
|
|
},
|
|
"qwen.qwen3-coder-480b-a35b-v1:0": {
|
|
"input_cost_per_token": 2.2e-07,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 262000,
|
|
"max_output_tokens": 65536,
|
|
"max_tokens": 262144,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.8e-06,
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"qwen.qwen3-235b-a22b-2507-v1:0": {
|
|
"input_cost_per_token": 2.2e-07,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 262144,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 262144,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8.8e-07,
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"qwen.qwen3-coder-30b-a3b-v1:0": {
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 262144,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 262144,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6.0e-07,
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"qwen.qwen3-32b-v1:0": {
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6.0e-07,
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"recraft/recraftv2": {
|
|
"litellm_provider": "recraft",
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.022,
|
|
"source": "https://www.recraft.ai/docs#pricing",
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"recraft/recraftv3": {
|
|
"litellm_provider": "recraft",
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.04,
|
|
"source": "https://www.recraft.ai/docs#pricing",
|
|
"supported_endpoints": [
|
|
"/v1/images/generations"
|
|
]
|
|
},
|
|
"replicate/meta/llama-2-13b": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "replicate",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5e-07,
|
|
"supports_tool_choice": true
|
|
},
|
|
"replicate/meta/llama-2-13b-chat": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "replicate",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5e-07,
|
|
"supports_tool_choice": true
|
|
},
|
|
"replicate/meta/llama-2-70b": {
|
|
"input_cost_per_token": 6.5e-07,
|
|
"litellm_provider": "replicate",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.75e-06,
|
|
"supports_tool_choice": true
|
|
},
|
|
"replicate/meta/llama-2-70b-chat": {
|
|
"input_cost_per_token": 6.5e-07,
|
|
"litellm_provider": "replicate",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.75e-06,
|
|
"supports_tool_choice": true
|
|
},
|
|
"replicate/meta/llama-2-7b": {
|
|
"input_cost_per_token": 5e-08,
|
|
"litellm_provider": "replicate",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.5e-07,
|
|
"supports_tool_choice": true
|
|
},
|
|
"replicate/meta/llama-2-7b-chat": {
|
|
"input_cost_per_token": 5e-08,
|
|
"litellm_provider": "replicate",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.5e-07,
|
|
"supports_tool_choice": true
|
|
},
|
|
"replicate/meta/llama-3-70b": {
|
|
"input_cost_per_token": 6.5e-07,
|
|
"litellm_provider": "replicate",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.75e-06,
|
|
"supports_tool_choice": true
|
|
},
|
|
"replicate/meta/llama-3-70b-instruct": {
|
|
"input_cost_per_token": 6.5e-07,
|
|
"litellm_provider": "replicate",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.75e-06,
|
|
"supports_tool_choice": true
|
|
},
|
|
"replicate/meta/llama-3-8b": {
|
|
"input_cost_per_token": 5e-08,
|
|
"litellm_provider": "replicate",
|
|
"max_input_tokens": 8086,
|
|
"max_output_tokens": 8086,
|
|
"max_tokens": 8086,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.5e-07,
|
|
"supports_tool_choice": true
|
|
},
|
|
"replicate/meta/llama-3-8b-instruct": {
|
|
"input_cost_per_token": 5e-08,
|
|
"litellm_provider": "replicate",
|
|
"max_input_tokens": 8086,
|
|
"max_output_tokens": 8086,
|
|
"max_tokens": 8086,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.5e-07,
|
|
"supports_tool_choice": true
|
|
},
|
|
"replicate/mistralai/mistral-7b-instruct-v0.2": {
|
|
"input_cost_per_token": 5e-08,
|
|
"litellm_provider": "replicate",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.5e-07,
|
|
"supports_tool_choice": true
|
|
},
|
|
"replicate/mistralai/mistral-7b-v0.1": {
|
|
"input_cost_per_token": 5e-08,
|
|
"litellm_provider": "replicate",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.5e-07,
|
|
"supports_tool_choice": true
|
|
},
|
|
"replicate/mistralai/mixtral-8x7b-instruct-v0.1": {
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "replicate",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-06,
|
|
"supports_tool_choice": true
|
|
},
|
|
"rerank-english-v2.0": {
|
|
"input_cost_per_query": 0.002,
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "cohere",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_query_tokens": 2048,
|
|
"max_tokens": 4096,
|
|
"mode": "rerank",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"rerank-english-v3.0": {
|
|
"input_cost_per_query": 0.002,
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "cohere",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_query_tokens": 2048,
|
|
"max_tokens": 4096,
|
|
"mode": "rerank",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"rerank-multilingual-v2.0": {
|
|
"input_cost_per_query": 0.002,
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "cohere",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_query_tokens": 2048,
|
|
"max_tokens": 4096,
|
|
"mode": "rerank",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"rerank-multilingual-v3.0": {
|
|
"input_cost_per_query": 0.002,
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "cohere",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_query_tokens": 2048,
|
|
"max_tokens": 4096,
|
|
"mode": "rerank",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"rerank-v3.5": {
|
|
"input_cost_per_query": 0.002,
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "cohere",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_query_tokens": 2048,
|
|
"max_tokens": 4096,
|
|
"mode": "rerank",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"nvidia_nim/nvidia/nv-rerankqa-mistral-4b-v3": {
|
|
"input_cost_per_query": 0.0,
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "nvidia_nim",
|
|
"mode": "rerank",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"nvidia_nim/nvidia/llama-3_2-nv-rerankqa-1b-v2": {
|
|
"input_cost_per_query": 0.0,
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "nvidia_nim",
|
|
"mode": "rerank",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"sagemaker/meta-textgeneration-llama-2-13b": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "sagemaker",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "completion",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"sagemaker/meta-textgeneration-llama-2-13b-f": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "sagemaker",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"sagemaker/meta-textgeneration-llama-2-70b": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "sagemaker",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "completion",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"sagemaker/meta-textgeneration-llama-2-70b-b-f": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "sagemaker",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"sagemaker/meta-textgeneration-llama-2-7b": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "sagemaker",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "completion",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"sagemaker/meta-textgeneration-llama-2-7b-f": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "sagemaker",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"sambanova/DeepSeek-R1": {
|
|
"input_cost_per_token": 5e-06,
|
|
"litellm_provider": "sambanova",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7e-06,
|
|
"source": "https://cloud.sambanova.ai/plans/pricing"
|
|
},
|
|
"sambanova/DeepSeek-R1-Distill-Llama-70B": {
|
|
"input_cost_per_token": 7e-07,
|
|
"litellm_provider": "sambanova",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.4e-06,
|
|
"source": "https://cloud.sambanova.ai/plans/pricing"
|
|
},
|
|
"sambanova/DeepSeek-V3-0324": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "sambanova",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4.5e-06,
|
|
"source": "https://cloud.sambanova.ai/plans/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"sambanova/Llama-4-Maverick-17B-128E-Instruct": {
|
|
"input_cost_per_token": 6.3e-07,
|
|
"litellm_provider": "sambanova",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"metadata": {
|
|
"notes": "For vision models, images are converted to 6432 input tokens and are billed at that amount"
|
|
},
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.8e-06,
|
|
"source": "https://cloud.sambanova.ai/plans/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"sambanova/Llama-4-Scout-17B-16E-Instruct": {
|
|
"input_cost_per_token": 4e-07,
|
|
"litellm_provider": "sambanova",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"metadata": {
|
|
"notes": "For vision models, images are converted to 6432 input tokens and are billed at that amount"
|
|
},
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7e-07,
|
|
"source": "https://cloud.sambanova.ai/plans/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"sambanova/Meta-Llama-3.1-405B-Instruct": {
|
|
"input_cost_per_token": 5e-06,
|
|
"litellm_provider": "sambanova",
|
|
"max_input_tokens": 16384,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"source": "https://cloud.sambanova.ai/plans/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"sambanova/Meta-Llama-3.1-8B-Instruct": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "sambanova",
|
|
"max_input_tokens": 16384,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-07,
|
|
"source": "https://cloud.sambanova.ai/plans/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"sambanova/Meta-Llama-3.2-1B-Instruct": {
|
|
"input_cost_per_token": 4e-08,
|
|
"litellm_provider": "sambanova",
|
|
"max_input_tokens": 16384,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8e-08,
|
|
"source": "https://cloud.sambanova.ai/plans/pricing"
|
|
},
|
|
"sambanova/Meta-Llama-3.2-3B-Instruct": {
|
|
"input_cost_per_token": 8e-08,
|
|
"litellm_provider": "sambanova",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.6e-07,
|
|
"source": "https://cloud.sambanova.ai/plans/pricing"
|
|
},
|
|
"sambanova/Meta-Llama-3.3-70B-Instruct": {
|
|
"input_cost_per_token": 6e-07,
|
|
"litellm_provider": "sambanova",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.2e-06,
|
|
"source": "https://cloud.sambanova.ai/plans/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"sambanova/Meta-Llama-Guard-3-8B": {
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "sambanova",
|
|
"max_input_tokens": 16384,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-07,
|
|
"source": "https://cloud.sambanova.ai/plans/pricing"
|
|
},
|
|
"sambanova/QwQ-32B": {
|
|
"input_cost_per_token": 5e-07,
|
|
"litellm_provider": "sambanova",
|
|
"max_input_tokens": 16384,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-06,
|
|
"source": "https://cloud.sambanova.ai/plans/pricing"
|
|
},
|
|
"sambanova/Qwen2-Audio-7B-Instruct": {
|
|
"input_cost_per_token": 5e-07,
|
|
"litellm_provider": "sambanova",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0001,
|
|
"source": "https://cloud.sambanova.ai/plans/pricing",
|
|
"supports_audio_input": true
|
|
},
|
|
"sambanova/Qwen3-32B": {
|
|
"input_cost_per_token": 4e-07,
|
|
"litellm_provider": "sambanova",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8e-07,
|
|
"source": "https://cloud.sambanova.ai/plans/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"sambanova/DeepSeek-V3.1": {
|
|
"max_tokens": 32768,
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 32768,
|
|
"input_cost_per_token": 3e-06,
|
|
"output_cost_per_token": 4.5e-06,
|
|
"litellm_provider": "sambanova",
|
|
"mode": "chat",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_reasoning": true,
|
|
"source": "https://cloud.sambanova.ai/plans/pricing"
|
|
},
|
|
"sambanova/gpt-oss-120b": {
|
|
"max_tokens": 131072,
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"input_cost_per_token": 3e-06,
|
|
"output_cost_per_token": 4.5e-06,
|
|
"litellm_provider": "sambanova",
|
|
"mode": "chat",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_reasoning": true,
|
|
"source": "https://cloud.sambanova.ai/plans/pricing"
|
|
},
"snowflake/claude-3-5-sonnet": {
|
|
"litellm_provider": "snowflake",
|
|
"max_input_tokens": 18000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 18000,
|
|
"mode": "chat",
|
|
"supports_computer_use": true
|
|
},
|
|
"snowflake/deepseek-r1": {
|
|
"litellm_provider": "snowflake",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"supports_reasoning": true
|
|
},
|
|
"snowflake/gemma-7b": {
|
|
"litellm_provider": "snowflake",
|
|
"max_input_tokens": 8000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8000,
|
|
"mode": "chat"
|
|
},
|
|
"snowflake/jamba-1.5-large": {
|
|
"litellm_provider": "snowflake",
|
|
"max_input_tokens": 256000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 256000,
|
|
"mode": "chat"
|
|
},
|
|
"snowflake/jamba-1.5-mini": {
|
|
"litellm_provider": "snowflake",
|
|
"max_input_tokens": 256000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 256000,
|
|
"mode": "chat"
|
|
},
|
|
"snowflake/jamba-instruct": {
|
|
"litellm_provider": "snowflake",
|
|
"max_input_tokens": 256000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 256000,
|
|
"mode": "chat"
|
|
},
|
|
"snowflake/llama2-70b-chat": {
|
|
"litellm_provider": "snowflake",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 4096,
|
|
"mode": "chat"
|
|
},
|
|
"snowflake/llama3-70b": {
|
|
"litellm_provider": "snowflake",
|
|
"max_input_tokens": 8000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8000,
|
|
"mode": "chat"
|
|
},
|
|
"snowflake/llama3-8b": {
|
|
"litellm_provider": "snowflake",
|
|
"max_input_tokens": 8000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8000,
|
|
"mode": "chat"
|
|
},
|
|
"snowflake/llama3.1-405b": {
|
|
"litellm_provider": "snowflake",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 128000,
|
|
"mode": "chat"
|
|
},
|
|
"snowflake/llama3.1-70b": {
|
|
"litellm_provider": "snowflake",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 128000,
|
|
"mode": "chat"
|
|
},
|
|
"snowflake/llama3.1-8b": {
|
|
"litellm_provider": "snowflake",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 128000,
|
|
"mode": "chat"
|
|
},
|
|
"snowflake/llama3.2-1b": {
|
|
"litellm_provider": "snowflake",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 128000,
|
|
"mode": "chat"
|
|
},
|
|
"snowflake/llama3.2-3b": {
|
|
"litellm_provider": "snowflake",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 128000,
|
|
"mode": "chat"
|
|
},
|
|
"snowflake/llama3.3-70b": {
|
|
"litellm_provider": "snowflake",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 128000,
|
|
"mode": "chat"
|
|
},
|
|
"snowflake/mistral-7b": {
|
|
"litellm_provider": "snowflake",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 32000,
|
|
"mode": "chat"
|
|
},
|
|
"snowflake/mistral-large": {
|
|
"litellm_provider": "snowflake",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 32000,
|
|
"mode": "chat"
|
|
},
|
|
"snowflake/mistral-large2": {
|
|
"litellm_provider": "snowflake",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 128000,
|
|
"mode": "chat"
|
|
},
|
|
"snowflake/mixtral-8x7b": {
|
|
"litellm_provider": "snowflake",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 32000,
|
|
"mode": "chat"
|
|
},
|
|
"snowflake/reka-core": {
|
|
"litellm_provider": "snowflake",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 32000,
|
|
"mode": "chat"
|
|
},
|
|
"snowflake/reka-flash": {
|
|
"litellm_provider": "snowflake",
|
|
"max_input_tokens": 100000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 100000,
|
|
"mode": "chat"
|
|
},
|
|
"snowflake/snowflake-arctic": {
|
|
"litellm_provider": "snowflake",
|
|
"max_input_tokens": 4096,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 4096,
|
|
"mode": "chat"
|
|
},
|
|
"snowflake/snowflake-llama-3.1-405b": {
|
|
"litellm_provider": "snowflake",
|
|
"max_input_tokens": 8000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8000,
|
|
"mode": "chat"
|
|
},
|
|
"snowflake/snowflake-llama-3.3-70b": {
|
|
"litellm_provider": "snowflake",
|
|
"max_input_tokens": 8000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8000,
|
|
"mode": "chat"
|
|
},
|
|
"stability.sd3-5-large-v1:0": {
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 77,
|
|
"max_tokens": 77,
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.08
|
|
},
|
|
"stability.sd3-large-v1:0": {
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 77,
|
|
"max_tokens": 77,
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.08
|
|
},
|
|
"stability.stable-image-core-v1:0": {
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 77,
|
|
"max_tokens": 77,
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.04
|
|
},
|
|
"stability.stable-image-core-v1:1": {
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 77,
|
|
"max_tokens": 77,
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.04
|
|
},
|
|
"stability.stable-image-ultra-v1:0": {
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 77,
|
|
"max_tokens": 77,
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.14
|
|
},
|
|
"stability.stable-image-ultra-v1:1": {
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 77,
|
|
"max_tokens": 77,
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.14
|
|
},
|
|
"standard/1024-x-1024/dall-e-3": {
|
|
"input_cost_per_pixel": 3.81469e-08,
|
|
"litellm_provider": "openai",
|
|
"mode": "image_generation",
|
|
"output_cost_per_pixel": 0.0
|
|
},
|
|
"standard/1024-x-1792/dall-e-3": {
|
|
"input_cost_per_pixel": 4.359e-08,
|
|
"litellm_provider": "openai",
|
|
"mode": "image_generation",
|
|
"output_cost_per_pixel": 0.0
|
|
},
|
|
"standard/1792-x-1024/dall-e-3": {
|
|
"input_cost_per_pixel": 4.359e-08,
|
|
"litellm_provider": "openai",
|
|
"mode": "image_generation",
|
|
"output_cost_per_pixel": 0.0
|
|
},
|
|
"tavily/search": {
|
|
"input_cost_per_query": 0.008,
|
|
"litellm_provider": "tavily",
|
|
"mode": "search"
|
|
},
|
|
"tavily/search-advanced": {
|
|
"input_cost_per_query": 0.016,
|
|
"litellm_provider": "tavily",
|
|
"mode": "search"
|
|
},
|
|
"text-bison": {
|
|
"input_cost_per_character": 2.5e-07,
|
|
"litellm_provider": "vertex_ai-text-models",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 2048,
|
|
"max_tokens": 2048,
|
|
"mode": "completion",
|
|
"output_cost_per_character": 5e-07,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models"
|
|
},
|
|
"text-bison32k": {
|
|
"input_cost_per_character": 2.5e-07,
|
|
"input_cost_per_token": 1.25e-07,
|
|
"litellm_provider": "vertex_ai-text-models",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 1024,
|
|
"max_tokens": 1024,
|
|
"mode": "completion",
|
|
"output_cost_per_character": 5e-07,
|
|
"output_cost_per_token": 1.25e-07,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models"
|
|
},
|
|
"text-bison32k@002": {
|
|
"input_cost_per_character": 2.5e-07,
|
|
"input_cost_per_token": 1.25e-07,
|
|
"litellm_provider": "vertex_ai-text-models",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 1024,
|
|
"max_tokens": 1024,
|
|
"mode": "completion",
|
|
"output_cost_per_character": 5e-07,
|
|
"output_cost_per_token": 1.25e-07,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models"
|
|
},
|
|
"text-bison@001": {
|
|
"input_cost_per_character": 2.5e-07,
|
|
"litellm_provider": "vertex_ai-text-models",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 1024,
|
|
"max_tokens": 1024,
|
|
"mode": "completion",
|
|
"output_cost_per_character": 5e-07,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models"
|
|
},
|
|
"text-bison@002": {
|
|
"input_cost_per_character": 2.5e-07,
|
|
"litellm_provider": "vertex_ai-text-models",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 1024,
|
|
"max_tokens": 1024,
|
|
"mode": "completion",
|
|
"output_cost_per_character": 5e-07,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models"
|
|
},
|
|
"text-completion-codestral/codestral-2405": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "text-completion-codestral",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "completion",
|
|
"output_cost_per_token": 0.0,
|
|
"source": "https://docs.mistral.ai/capabilities/code_generation/"
|
|
},
|
|
"text-completion-codestral/codestral-latest": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "text-completion-codestral",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "completion",
|
|
"output_cost_per_token": 0.0,
|
|
"source": "https://docs.mistral.ai/capabilities/code_generation/"
|
|
},
|
|
"text-embedding-004": {
|
|
"input_cost_per_character": 2.5e-08,
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "vertex_ai-embedding-models",
|
|
"max_input_tokens": 2048,
|
|
"max_tokens": 2048,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0,
|
|
"output_vector_size": 768,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models"
|
|
},
|
|
"text-embedding-005": {
|
|
"input_cost_per_character": 2.5e-08,
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "vertex_ai-embedding-models",
|
|
"max_input_tokens": 2048,
|
|
"max_tokens": 2048,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0,
|
|
"output_vector_size": 768,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models"
|
|
},
|
|
"text-embedding-3-large": {
|
|
"input_cost_per_token": 1.3e-07,
|
|
"input_cost_per_token_batches": 6.5e-08,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0,
|
|
"output_cost_per_token_batches": 0.0,
|
|
"output_vector_size": 3072
|
|
},
|
|
"text-embedding-3-small": {
|
|
"input_cost_per_token": 2e-08,
|
|
"input_cost_per_token_batches": 1e-08,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0,
|
|
"output_cost_per_token_batches": 0.0,
|
|
"output_vector_size": 1536
|
|
},
|
|
"text-embedding-ada-002": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0,
|
|
"output_vector_size": 1536
|
|
},
|
|
"text-embedding-ada-002-v2": {
|
|
"input_cost_per_token": 1e-07,
|
|
"input_cost_per_token_batches": 5e-08,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0,
|
|
"output_cost_per_token_batches": 0.0
|
|
},
|
|
"text-embedding-large-exp-03-07": {
|
|
"input_cost_per_character": 2.5e-08,
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "vertex_ai-embedding-models",
|
|
"max_input_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0,
|
|
"output_vector_size": 3072,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models"
|
|
},
|
|
"text-embedding-preview-0409": {
|
|
"input_cost_per_token": 6.25e-09,
|
|
"input_cost_per_token_batch_requests": 5e-09,
|
|
"litellm_provider": "vertex_ai-embedding-models",
|
|
"max_input_tokens": 3072,
|
|
"max_tokens": 3072,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0,
|
|
"output_vector_size": 768,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing"
|
|
},
|
|
"text-moderation-007": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 0,
|
|
"max_tokens": 32768,
|
|
"mode": "moderation",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"text-moderation-latest": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 0,
|
|
"max_tokens": 32768,
|
|
"mode": "moderation",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"text-moderation-stable": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "openai",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 0,
|
|
"max_tokens": 32768,
|
|
"mode": "moderation",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"text-multilingual-embedding-002": {
|
|
"input_cost_per_character": 2.5e-08,
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "vertex_ai-embedding-models",
|
|
"max_input_tokens": 2048,
|
|
"max_tokens": 2048,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0,
|
|
"output_vector_size": 768,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models"
|
|
},
|
|
"text-multilingual-embedding-preview-0409": {
|
|
"input_cost_per_token": 6.25e-09,
|
|
"litellm_provider": "vertex_ai-embedding-models",
|
|
"max_input_tokens": 3072,
|
|
"max_tokens": 3072,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0,
|
|
"output_vector_size": 768,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models"
|
|
},
|
|
"text-unicorn": {
|
|
"input_cost_per_token": 1e-05,
|
|
"litellm_provider": "vertex_ai-text-models",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 1024,
|
|
"max_tokens": 1024,
|
|
"mode": "completion",
|
|
"output_cost_per_token": 2.8e-05,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models"
|
|
},
|
|
"text-unicorn@001": {
|
|
"input_cost_per_token": 1e-05,
|
|
"litellm_provider": "vertex_ai-text-models",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 1024,
|
|
"max_tokens": 1024,
|
|
"mode": "completion",
|
|
"output_cost_per_token": 2.8e-05,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models"
|
|
},
|
|
"textembedding-gecko": {
|
|
"input_cost_per_character": 2.5e-08,
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "vertex_ai-embedding-models",
|
|
"max_input_tokens": 3072,
|
|
"max_tokens": 3072,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0,
|
|
"output_vector_size": 768,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models"
|
|
},
|
|
"textembedding-gecko-multilingual": {
|
|
"input_cost_per_character": 2.5e-08,
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "vertex_ai-embedding-models",
|
|
"max_input_tokens": 3072,
|
|
"max_tokens": 3072,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0,
|
|
"output_vector_size": 768,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models"
|
|
},
|
|
"textembedding-gecko-multilingual@001": {
|
|
"input_cost_per_character": 2.5e-08,
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "vertex_ai-embedding-models",
|
|
"max_input_tokens": 3072,
|
|
"max_tokens": 3072,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0,
|
|
"output_vector_size": 768,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models"
|
|
},
|
|
"textembedding-gecko@001": {
|
|
"input_cost_per_character": 2.5e-08,
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "vertex_ai-embedding-models",
|
|
"max_input_tokens": 3072,
|
|
"max_tokens": 3072,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0,
|
|
"output_vector_size": 768,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models"
|
|
},
|
|
"textembedding-gecko@003": {
|
|
"input_cost_per_character": 2.5e-08,
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "vertex_ai-embedding-models",
|
|
"max_input_tokens": 3072,
|
|
"max_tokens": 3072,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0,
|
|
"output_vector_size": 768,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models"
|
|
},
|
|
"together-ai-21.1b-41b": {
|
|
"input_cost_per_token": 8e-07,
|
|
"litellm_provider": "together_ai",
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8e-07
|
|
},
|
|
"together-ai-4.1b-8b": {
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "together_ai",
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-07
|
|
},
|
|
"together-ai-41.1b-80b": {
|
|
"input_cost_per_token": 9e-07,
|
|
"litellm_provider": "together_ai",
|
|
"mode": "chat",
|
|
"output_cost_per_token": 9e-07
|
|
},
|
|
"together-ai-8.1b-21b": {
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "together_ai",
|
|
"max_tokens": 1000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-07
|
|
},
|
|
"together-ai-81.1b-110b": {
|
|
"input_cost_per_token": 1.8e-06,
|
|
"litellm_provider": "together_ai",
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.8e-06
|
|
},
|
|
"together-ai-embedding-151m-to-350m": {
|
|
"input_cost_per_token": 1.6e-08,
|
|
"litellm_provider": "together_ai",
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"together-ai-embedding-up-to-150m": {
|
|
"input_cost_per_token": 8e-09,
|
|
"litellm_provider": "together_ai",
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"together_ai/baai/bge-base-en-v1.5": {
|
|
"input_cost_per_token": 8e-09,
|
|
"litellm_provider": "together_ai",
|
|
"max_input_tokens": 512,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0,
|
|
"output_vector_size": 768
|
|
},
|
|
"together_ai/BAAI/bge-base-en-v1.5": {
|
|
"input_cost_per_token": 8e-09,
|
|
"litellm_provider": "together_ai",
|
|
"max_input_tokens": 512,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0,
|
|
"output_vector_size": 768
|
|
},
|
|
"together-ai-up-to-4b": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "together_ai",
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-07
|
|
},
|
|
"together_ai/Qwen/Qwen2.5-72B-Instruct-Turbo": {
|
|
"litellm_provider": "together_ai",
|
|
"mode": "chat",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"together_ai/Qwen/Qwen2.5-7B-Instruct-Turbo": {
|
|
"litellm_provider": "together_ai",
|
|
"mode": "chat",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"together_ai/Qwen/Qwen3-235B-A22B-Instruct-2507-tput": {
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "together_ai",
|
|
"max_input_tokens": 262000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-06,
|
|
"source": "https://www.together.ai/models/qwen3-235b-a22b-instruct-2507-fp8",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"together_ai/Qwen/Qwen3-235B-A22B-Thinking-2507": {
|
|
"input_cost_per_token": 6.5e-07,
|
|
"litellm_provider": "together_ai",
|
|
"max_input_tokens": 256000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-06,
|
|
"source": "https://www.together.ai/models/qwen3-235b-a22b-thinking-2507",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"together_ai/Qwen/Qwen3-235B-A22B-fp8-tput": {
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "together_ai",
|
|
"max_input_tokens": 40000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-07,
|
|
"source": "https://www.together.ai/models/qwen3-235b-a22b-fp8-tput",
|
|
"supports_function_calling": false,
|
|
"supports_parallel_function_calling": false,
|
|
"supports_tool_choice": false
|
|
},
|
|
"together_ai/Qwen/Qwen3-Coder-480B-A35B-Instruct-FP8": {
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "together_ai",
|
|
"max_input_tokens": 256000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-06,
|
|
"source": "https://www.together.ai/models/qwen3-coder-480b-a35b-instruct",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"together_ai/deepseek-ai/DeepSeek-R1": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "together_ai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 20480,
|
|
"max_tokens": 20480,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7e-06,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"together_ai/deepseek-ai/DeepSeek-R1-0528-tput": {
|
|
"input_cost_per_token": 5.5e-07,
|
|
"litellm_provider": "together_ai",
|
|
"max_input_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.19e-06,
|
|
"source": "https://www.together.ai/models/deepseek-r1-0528-throughput",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"together_ai/deepseek-ai/DeepSeek-V3": {
|
|
"input_cost_per_token": 1.25e-06,
|
|
"litellm_provider": "together_ai",
|
|
"max_input_tokens": 65536,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.25e-06,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"together_ai/deepseek-ai/DeepSeek-V3.1": {
|
|
"input_cost_per_token": 6e-07,
|
|
"litellm_provider": "together_ai",
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.7e-06,
|
|
"source": "https://www.together.ai/models/deepseek-v3-1",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"together_ai/meta-llama/Llama-3.2-3B-Instruct-Turbo": {
|
|
"litellm_provider": "together_ai",
|
|
"mode": "chat",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"together_ai/meta-llama/Llama-3.3-70B-Instruct-Turbo": {
|
|
"input_cost_per_token": 8.8e-07,
|
|
"litellm_provider": "together_ai",
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8.8e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"together_ai/meta-llama/Llama-3.3-70B-Instruct-Turbo-Free": {
|
|
"input_cost_per_token": 0,
|
|
"litellm_provider": "together_ai",
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"together_ai/meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8": {
|
|
"input_cost_per_token": 2.7e-07,
|
|
"litellm_provider": "together_ai",
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8.5e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"together_ai/meta-llama/Llama-4-Scout-17B-16E-Instruct": {
|
|
"input_cost_per_token": 1.8e-07,
|
|
"litellm_provider": "together_ai",
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5.9e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"together_ai/meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo": {
|
|
"input_cost_per_token": 3.5e-06,
|
|
"litellm_provider": "together_ai",
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3.5e-06,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"together_ai/meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo": {
|
|
"input_cost_per_token": 8.8e-07,
|
|
"litellm_provider": "together_ai",
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8.8e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"together_ai/meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo": {
|
|
"input_cost_per_token": 1.8e-07,
|
|
"litellm_provider": "together_ai",
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.8e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"together_ai/mistralai/Mistral-7B-Instruct-v0.1": {
|
|
"litellm_provider": "together_ai",
|
|
"mode": "chat",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"together_ai/mistralai/Mistral-Small-24B-Instruct-2501": {
|
|
"litellm_provider": "together_ai",
|
|
"mode": "chat",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"together_ai/mistralai/Mixtral-8x7B-Instruct-v0.1": {
|
|
"input_cost_per_token": 6e-07,
|
|
"litellm_provider": "together_ai",
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-07,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"together_ai/moonshotai/Kimi-K2-Instruct": {
|
|
"input_cost_per_token": 1e-06,
|
|
"litellm_provider": "together_ai",
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-06,
|
|
"source": "https://www.together.ai/models/kimi-k2-instruct",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"together_ai/openai/gpt-oss-120b": {
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "together_ai",
|
|
"max_input_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-07,
|
|
"source": "https://www.together.ai/models/gpt-oss-120b",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"together_ai/openai/gpt-oss-20b": {
|
|
"input_cost_per_token": 5e-08,
|
|
"litellm_provider": "together_ai",
|
|
"max_input_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-07,
|
|
"source": "https://www.together.ai/models/gpt-oss-20b",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"together_ai/togethercomputer/CodeLlama-34b-Instruct": {
|
|
"litellm_provider": "together_ai",
|
|
"mode": "chat",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"together_ai/zai-org/GLM-4.5-Air-FP8": {
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "together_ai",
|
|
"max_input_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.1e-06,
|
|
"source": "https://www.together.ai/models/glm-4-5-air",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"together_ai/zai-org/GLM-4.6": {
|
|
"input_cost_per_token": 0.6e-06,
|
|
"litellm_provider": "together_ai",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 200000,
|
|
"max_tokens": 200000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.2e-06,
|
|
"source": "https://www.together.ai/models/glm-4-6",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"together_ai/moonshotai/Kimi-K2-Instruct-0905": {
|
|
"input_cost_per_token": 1e-06,
|
|
"litellm_provider": "together_ai",
|
|
"max_input_tokens": 262144,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-06,
|
|
"source": "https://www.together.ai/models/kimi-k2-0905",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"together_ai/Qwen/Qwen3-Next-80B-A3B-Instruct": {
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "together_ai",
|
|
"max_input_tokens": 262144,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-06,
|
|
"source": "https://www.together.ai/models/qwen3-next-80b-a3b-instruct",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"together_ai/Qwen/Qwen3-Next-80B-A3B-Thinking": {
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "together_ai",
|
|
"max_input_tokens": 262144,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-06,
|
|
"source": "https://www.together.ai/models/qwen3-next-80b-a3b-thinking",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"tts-1": {
|
|
"input_cost_per_character": 1.5e-05,
|
|
"litellm_provider": "openai",
|
|
"mode": "audio_speech",
|
|
"supported_endpoints": [
|
|
"/v1/audio/speech"
|
|
]
|
|
},
|
|
"tts-1-hd": {
|
|
"input_cost_per_character": 3e-05,
|
|
"litellm_provider": "openai",
|
|
"mode": "audio_speech",
|
|
"supported_endpoints": [
|
|
"/v1/audio/speech"
|
|
]
|
|
},
|
|
"us.amazon.nova-lite-v1:0": {
|
|
"input_cost_per_token": 6e-08,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 300000,
|
|
"max_output_tokens": 10000,
|
|
"max_tokens": 10000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.4e-07,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_vision": true
|
|
},
|
|
"us.amazon.nova-micro-v1:0": {
|
|
"input_cost_per_token": 3.5e-08,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 10000,
|
|
"max_tokens": 10000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.4e-07,
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true
|
|
},
|
|
"us.amazon.nova-premier-v1:0": {
|
|
"input_cost_per_token": 2.5e-06,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 1000000,
|
|
"max_output_tokens": 10000,
|
|
"max_tokens": 10000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.25e-05,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": false,
|
|
"supports_response_schema": true,
|
|
"supports_vision": true
|
|
},
|
|
"us.amazon.nova-pro-v1:0": {
|
|
"input_cost_per_token": 8e-07,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 300000,
|
|
"max_output_tokens": 10000,
|
|
"max_tokens": 10000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3.2e-06,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_vision": true
|
|
},
|
|
"us.anthropic.claude-3-5-haiku-20241022-v1:0": {
|
|
"cache_creation_input_token_cost": 1e-06,
|
|
"cache_read_input_token_cost": 8e-08,
|
|
"input_cost_per_token": 8e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-06,
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"us.anthropic.claude-haiku-4-5-20251001-v1:0": {
|
|
"cache_creation_input_token_cost": 1.375e-06,
|
|
"cache_read_input_token_cost": 1.1e-07,
|
|
"input_cost_per_token": 1.1e-06,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 64000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5.5e-06,
|
|
"source": "https://aws.amazon.com/about-aws/whats-new/2025/10/claude-4-5-haiku-anthropic-amazon-bedrock",
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 346
|
|
},
|
|
"us.anthropic.claude-3-5-sonnet-20240620-v1:0": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"us.anthropic.claude-3-5-sonnet-20241022-v2:0": {
|
|
"cache_creation_input_token_cost": 3.75e-06,
|
|
"cache_read_input_token_cost": 3e-07,
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"us.anthropic.claude-3-7-sonnet-20250219-v1:0": {
|
|
"cache_creation_input_token_cost": 3.75e-06,
|
|
"cache_read_input_token_cost": 3e-07,
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"us.anthropic.claude-3-haiku-20240307-v1:0": {
|
|
"input_cost_per_token": 2.5e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.25e-06,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"us.anthropic.claude-3-opus-20240229-v1:0": {
|
|
"input_cost_per_token": 1.5e-05,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.5e-05,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"us.anthropic.claude-3-sonnet-20240229-v1:0": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"us.anthropic.claude-opus-4-1-20250805-v1:0": {
|
|
"cache_creation_input_token_cost": 1.875e-05,
|
|
"cache_read_input_token_cost": 1.5e-06,
|
|
"input_cost_per_token": 1.5e-05,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 32000,
|
|
"max_tokens": 32000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.5e-05,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.01,
|
|
"search_context_size_low": 0.01,
|
|
"search_context_size_medium": 0.01
|
|
},
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 159
|
|
},
|
|
"us.anthropic.claude-sonnet-4-5-20250929-v1:0": {
|
|
"cache_creation_input_token_cost": 4.125e-06,
|
|
"cache_read_input_token_cost": 3.3e-07,
|
|
"input_cost_per_token": 3.3e-06,
|
|
"input_cost_per_token_above_200k_tokens": 6.6e-06,
|
|
"output_cost_per_token_above_200k_tokens": 2.475e-05,
|
|
"cache_creation_input_token_cost_above_200k_tokens": 8.25e-06,
|
|
"cache_read_input_token_cost_above_200k_tokens": 6.6e-07,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 64000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.65e-05,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.01,
|
|
"search_context_size_low": 0.01,
|
|
"search_context_size_medium": 0.01
|
|
},
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 346
|
|
},
|
|
"au.anthropic.claude-haiku-4-5-20251001-v1:0": {
|
|
"cache_creation_input_token_cost": 1.375e-06,
|
|
"cache_read_input_token_cost": 1.1e-07,
|
|
"input_cost_per_token": 1.1e-06,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 64000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5.5e-06,
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 346
|
|
},
|
|
"us.anthropic.claude-opus-4-20250514-v1:0": {
|
|
"cache_creation_input_token_cost": 1.875e-05,
|
|
"cache_read_input_token_cost": 1.5e-06,
|
|
"input_cost_per_token": 1.5e-05,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 32000,
|
|
"max_tokens": 32000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.5e-05,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.01,
|
|
"search_context_size_low": 0.01,
|
|
"search_context_size_medium": 0.01
|
|
},
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 159
|
|
},
|
|
"us.anthropic.claude-opus-4-5-20251101-v1:0": {
|
|
"cache_creation_input_token_cost": 6.25e-06,
|
|
"cache_read_input_token_cost": 5e-07,
|
|
"input_cost_per_token": 5e-06,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 64000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.5e-05,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.01,
|
|
"search_context_size_low": 0.01,
|
|
"search_context_size_medium": 0.01
|
|
},
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 159
|
|
},
|
|
"global.anthropic.claude-opus-4-5-20251101-v1:0": {
|
|
"cache_creation_input_token_cost": 6.25e-06,
|
|
"cache_read_input_token_cost": 5e-07,
|
|
"input_cost_per_token": 5e-06,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 64000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.5e-05,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.01,
|
|
"search_context_size_low": 0.01,
|
|
"search_context_size_medium": 0.01
|
|
},
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 159
|
|
},
|
|
"us.anthropic.claude-sonnet-4-20250514-v1:0": {
|
|
"cache_creation_input_token_cost": 3.75e-06,
|
|
"cache_read_input_token_cost": 3e-07,
|
|
"input_cost_per_token": 3e-06,
|
|
"input_cost_per_token_above_200k_tokens": 6e-06,
|
|
"output_cost_per_token_above_200k_tokens": 2.25e-05,
|
|
"cache_creation_input_token_cost_above_200k_tokens": 7.5e-06,
|
|
"cache_read_input_token_cost_above_200k_tokens": 6e-07,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 1000000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 64000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.01,
|
|
"search_context_size_low": 0.01,
|
|
"search_context_size_medium": 0.01
|
|
},
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 159
|
|
},
|
|
"us.deepseek.r1-v1:0": {
|
|
"input_cost_per_token": 1.35e-06,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5.4e-06,
|
|
"supports_function_calling": false,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": false
|
|
},
|
|
"us.meta.llama3-1-405b-instruct-v1:0": {
|
|
"input_cost_per_token": 5.32e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.6e-05,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": false
|
|
},
|
|
"us.meta.llama3-1-70b-instruct-v1:0": {
|
|
"input_cost_per_token": 9.9e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 2048,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 9.9e-07,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": false
|
|
},
|
|
"us.meta.llama3-1-8b-instruct-v1:0": {
|
|
"input_cost_per_token": 2.2e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 2048,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.2e-07,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": false
|
|
},
|
|
"us.meta.llama3-2-11b-instruct-v1:0": {
|
|
"input_cost_per_token": 3.5e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3.5e-07,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": false,
|
|
"supports_vision": true
|
|
},
|
|
"us.meta.llama3-2-1b-instruct-v1:0": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-07,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": false
|
|
},
|
|
"us.meta.llama3-2-3b-instruct-v1:0": {
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-07,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": false
|
|
},
|
|
"us.meta.llama3-2-90b-instruct-v1:0": {
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "bedrock",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-06,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": false,
|
|
"supports_vision": true
|
|
},
|
|
"us.meta.llama3-3-70b-instruct-v1:0": {
|
|
"input_cost_per_token": 7.2e-07,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.2e-07,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": false
|
|
},
|
|
"us.meta.llama4-maverick-17b-instruct-v1:0": {
|
|
"input_cost_per_token": 2.4e-07,
|
|
"input_cost_per_token_batches": 1.2e-07,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 9.7e-07,
|
|
"output_cost_per_token_batches": 4.85e-07,
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"code"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": false
|
|
},
|
|
"us.meta.llama4-scout-17b-instruct-v1:0": {
|
|
"input_cost_per_token": 1.7e-07,
|
|
"input_cost_per_token_batches": 8.5e-08,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6.6e-07,
|
|
"output_cost_per_token_batches": 3.3e-07,
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"code"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": false
|
|
},
|
|
"us.mistral.pixtral-large-2502-v1:0": {
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "bedrock_converse",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-06,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": false
|
|
},
|
|
"v0/v0-1.0-md": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "v0",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"v0/v0-1.5-lg": {
|
|
"input_cost_per_token": 1.5e-05,
|
|
"litellm_provider": "v0",
|
|
"max_input_tokens": 512000,
|
|
"max_output_tokens": 512000,
|
|
"max_tokens": 512000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.5e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"v0/v0-1.5-md": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "v0",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"vercel_ai_gateway/alibaba/qwen-3-14b": {
|
|
"input_cost_per_token": 8e-08,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 40960,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 40960,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.4e-07
|
|
},
|
|
"vercel_ai_gateway/alibaba/qwen-3-235b": {
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 40960,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 40960,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-07
|
|
},
|
|
"vercel_ai_gateway/alibaba/qwen-3-30b": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 40960,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 40960,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-07
|
|
},
|
|
"vercel_ai_gateway/alibaba/qwen-3-32b": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 40960,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 40960,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-07
|
|
},
|
|
"vercel_ai_gateway/alibaba/qwen3-coder": {
|
|
"input_cost_per_token": 4e-07,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 262144,
|
|
"max_output_tokens": 66536,
|
|
"max_tokens": 262144,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.6e-06
|
|
},
|
|
"vercel_ai_gateway/amazon/nova-lite": {
|
|
"input_cost_per_token": 6e-08,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 300000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 300000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.4e-07
|
|
},
|
|
"vercel_ai_gateway/amazon/nova-micro": {
|
|
"input_cost_per_token": 3.5e-08,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.4e-07
|
|
},
|
|
"vercel_ai_gateway/amazon/nova-pro": {
|
|
"input_cost_per_token": 8e-07,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 300000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 300000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3.2e-06
|
|
},
|
|
"vercel_ai_gateway/amazon/titan-embed-text-v2": {
|
|
"input_cost_per_token": 2e-08,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 0,
|
|
"max_output_tokens": 0,
|
|
"max_tokens": 0,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"vercel_ai_gateway/anthropic/claude-3-haiku": {
|
|
"cache_creation_input_token_cost": 3e-07,
|
|
"cache_read_input_token_cost": 3e-08,
|
|
"input_cost_per_token": 2.5e-07,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 200000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.25e-06
|
|
},
|
|
"vercel_ai_gateway/anthropic/claude-3-opus": {
|
|
"cache_creation_input_token_cost": 1.875e-05,
|
|
"cache_read_input_token_cost": 1.5e-06,
|
|
"input_cost_per_token": 1.5e-05,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 200000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.5e-05
|
|
},
|
|
"vercel_ai_gateway/anthropic/claude-3.5-haiku": {
|
|
"cache_creation_input_token_cost": 1e-06,
|
|
"cache_read_input_token_cost": 8e-08,
|
|
"input_cost_per_token": 8e-07,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 200000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-06
|
|
},
|
|
"vercel_ai_gateway/anthropic/claude-3.5-sonnet": {
|
|
"cache_creation_input_token_cost": 3.75e-06,
|
|
"cache_read_input_token_cost": 3e-07,
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 200000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05
|
|
},
|
|
"vercel_ai_gateway/anthropic/claude-3.7-sonnet": {
|
|
"cache_creation_input_token_cost": 3.75e-06,
|
|
"cache_read_input_token_cost": 3e-07,
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 200000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05
|
|
},
|
|
"vercel_ai_gateway/anthropic/claude-4-opus": {
|
|
"cache_creation_input_token_cost": 1.875e-05,
|
|
"cache_read_input_token_cost": 1.5e-06,
|
|
"input_cost_per_token": 1.5e-05,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 32000,
|
|
"max_tokens": 200000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.5e-05
|
|
},
|
|
"vercel_ai_gateway/anthropic/claude-4-sonnet": {
|
|
"cache_creation_input_token_cost": 3.75e-06,
|
|
"cache_read_input_token_cost": 3e-07,
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 200000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05
|
|
},
|
|
"vercel_ai_gateway/cohere/command-a": {
|
|
"input_cost_per_token": 2.5e-06,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 256000,
|
|
"max_output_tokens": 8000,
|
|
"max_tokens": 256000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05
|
|
},
|
|
"vercel_ai_gateway/cohere/command-r": {
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-07
|
|
},
|
|
"vercel_ai_gateway/cohere/command-r-plus": {
|
|
"input_cost_per_token": 2.5e-06,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05
|
|
},
|
|
"vercel_ai_gateway/cohere/embed-v4.0": {
|
|
"input_cost_per_token": 1.2e-07,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 0,
|
|
"max_output_tokens": 0,
|
|
"max_tokens": 0,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"vercel_ai_gateway/deepseek/deepseek-r1": {
|
|
"input_cost_per_token": 5.5e-07,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.19e-06
|
|
},
|
|
"vercel_ai_gateway/deepseek/deepseek-r1-distill-llama-70b": {
|
|
"input_cost_per_token": 7.5e-07,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 9.9e-07
|
|
},
|
|
"vercel_ai_gateway/deepseek/deepseek-v3": {
|
|
"input_cost_per_token": 9e-07,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 9e-07
|
|
},
|
|
"vercel_ai_gateway/google/gemini-2.0-flash": {
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 1048576,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-07
|
|
},
|
|
"vercel_ai_gateway/google/gemini-2.0-flash-lite": {
|
|
"input_cost_per_token": 7.5e-08,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 1048576,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-07
|
|
},
|
|
"vercel_ai_gateway/google/gemini-2.5-flash": {
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 1000000,
|
|
"max_output_tokens": 65536,
|
|
"max_tokens": 1000000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.5e-06
|
|
},
|
|
"vercel_ai_gateway/google/gemini-2.5-pro": {
|
|
"input_cost_per_token": 2.5e-06,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 1048576,
|
|
"max_output_tokens": 65536,
|
|
"max_tokens": 1048576,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05
|
|
},
|
|
"vercel_ai_gateway/google/gemini-embedding-001": {
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 0,
|
|
"max_output_tokens": 0,
|
|
"max_tokens": 0,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"vercel_ai_gateway/google/gemma-2-9b": {
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-07
|
|
},
|
|
"vercel_ai_gateway/google/text-embedding-005": {
|
|
"input_cost_per_token": 2.5e-08,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 0,
|
|
"max_output_tokens": 0,
|
|
"max_tokens": 0,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"vercel_ai_gateway/google/text-multilingual-embedding-002": {
|
|
"input_cost_per_token": 2.5e-08,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 0,
|
|
"max_output_tokens": 0,
|
|
"max_tokens": 0,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"vercel_ai_gateway/inception/mercury-coder-small": {
|
|
"input_cost_per_token": 2.5e-07,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 32000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-06
|
|
},
|
|
"vercel_ai_gateway/meta/llama-3-70b": {
|
|
"input_cost_per_token": 5.9e-07,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.9e-07
|
|
},
|
|
"vercel_ai_gateway/meta/llama-3-8b": {
|
|
"input_cost_per_token": 5e-08,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8e-08
|
|
},
|
|
"vercel_ai_gateway/meta/llama-3.1-70b": {
|
|
"input_cost_per_token": 7.2e-07,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.2e-07
|
|
},
|
|
"vercel_ai_gateway/meta/llama-3.1-8b": {
|
|
"input_cost_per_token": 5e-08,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 131000,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8e-08
|
|
},
|
|
"vercel_ai_gateway/meta/llama-3.2-11b": {
|
|
"input_cost_per_token": 1.6e-07,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.6e-07
|
|
},
|
|
"vercel_ai_gateway/meta/llama-3.2-1b": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-07
|
|
},
|
|
"vercel_ai_gateway/meta/llama-3.2-3b": {
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-07
|
|
},
|
|
"vercel_ai_gateway/meta/llama-3.2-90b": {
|
|
"input_cost_per_token": 7.2e-07,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.2e-07
|
|
},
|
|
"vercel_ai_gateway/meta/llama-3.3-70b": {
|
|
"input_cost_per_token": 7.2e-07,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.2e-07
|
|
},
|
|
"vercel_ai_gateway/meta/llama-4-maverick": {
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-07
|
|
},
|
|
"vercel_ai_gateway/meta/llama-4-scout": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-07
|
|
},
|
|
"vercel_ai_gateway/mistral/codestral": {
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 256000,
|
|
"max_output_tokens": 4000,
|
|
"max_tokens": 256000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 9e-07
|
|
},
|
|
"vercel_ai_gateway/mistral/codestral-embed": {
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 0,
|
|
"max_output_tokens": 0,
|
|
"max_tokens": 0,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"vercel_ai_gateway/mistral/devstral-small": {
|
|
"input_cost_per_token": 7e-08,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.8e-07
|
|
},
|
|
"vercel_ai_gateway/mistral/magistral-medium": {
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5e-06
|
|
},
|
|
"vercel_ai_gateway/mistral/magistral-small": {
|
|
"input_cost_per_token": 5e-07,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-06
|
|
},
|
|
"vercel_ai_gateway/mistral/ministral-3b": {
|
|
"input_cost_per_token": 4e-08,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-08
|
|
},
|
|
"vercel_ai_gateway/mistral/ministral-8b": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-07
|
|
},
|
|
"vercel_ai_gateway/mistral/mistral-embed": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 0,
|
|
"max_output_tokens": 0,
|
|
"max_tokens": 0,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"vercel_ai_gateway/mistral/mistral-large": {
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 4000,
|
|
"max_tokens": 32000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-06
|
|
},
|
|
"vercel_ai_gateway/mistral/mistral-saba-24b": {
|
|
"input_cost_per_token": 7.9e-07,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.9e-07
|
|
},
|
|
"vercel_ai_gateway/mistral/mistral-small": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 4000,
|
|
"max_tokens": 32000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-07
|
|
},
|
|
"vercel_ai_gateway/mistral/mixtral-8x22b-instruct": {
|
|
"input_cost_per_token": 1.2e-06,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 65536,
|
|
"max_output_tokens": 2048,
|
|
"max_tokens": 65536,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.2e-06
|
|
},
|
|
"vercel_ai_gateway/mistral/pixtral-12b": {
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-07
|
|
},
|
|
"vercel_ai_gateway/mistral/pixtral-large": {
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-06
|
|
},
|
|
"vercel_ai_gateway/moonshotai/kimi-k2": {
|
|
"input_cost_per_token": 5.5e-07,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.2e-06
|
|
},
|
|
"vercel_ai_gateway/morph/morph-v3-fast": {
|
|
"input_cost_per_token": 8e-07,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.2e-06
|
|
},
|
|
"vercel_ai_gateway/morph/morph-v3-large": {
|
|
"input_cost_per_token": 9e-07,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.9e-06
|
|
},
|
|
"vercel_ai_gateway/openai/gpt-3.5-turbo": {
|
|
"input_cost_per_token": 5e-07,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 16385,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 16385,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-06
|
|
},
|
|
"vercel_ai_gateway/openai/gpt-3.5-turbo-instruct": {
|
|
"input_cost_per_token": 1.5e-06,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-06
|
|
},
|
|
"vercel_ai_gateway/openai/gpt-4-turbo": {
|
|
"input_cost_per_token": 1e-05,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-05
|
|
},
|
|
"vercel_ai_gateway/openai/gpt-4.1": {
|
|
"cache_creation_input_token_cost": 0.0,
|
|
"cache_read_input_token_cost": 5e-07,
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 1047576,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 1047576,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8e-06
|
|
},
|
|
"vercel_ai_gateway/openai/gpt-4.1-mini": {
|
|
"cache_creation_input_token_cost": 0.0,
|
|
"cache_read_input_token_cost": 1e-07,
|
|
"input_cost_per_token": 4e-07,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 1047576,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 1047576,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.6e-06
|
|
},
|
|
"vercel_ai_gateway/openai/gpt-4.1-nano": {
|
|
"cache_creation_input_token_cost": 0.0,
|
|
"cache_read_input_token_cost": 2.5e-08,
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 1047576,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 1047576,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-07
|
|
},
|
|
"vercel_ai_gateway/openai/gpt-4o": {
|
|
"cache_creation_input_token_cost": 0.0,
|
|
"cache_read_input_token_cost": 1.25e-06,
|
|
"input_cost_per_token": 2.5e-06,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05
|
|
},
|
|
"vercel_ai_gateway/openai/gpt-4o-mini": {
|
|
"cache_creation_input_token_cost": 0.0,
|
|
"cache_read_input_token_cost": 7.5e-08,
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-07
|
|
},
|
|
"vercel_ai_gateway/openai/o1": {
|
|
"cache_creation_input_token_cost": 0.0,
|
|
"cache_read_input_token_cost": 7.5e-06,
|
|
"input_cost_per_token": 1.5e-05,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 100000,
|
|
"max_tokens": 200000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-05
|
|
},
|
|
"vercel_ai_gateway/openai/o3": {
|
|
"cache_creation_input_token_cost": 0.0,
|
|
"cache_read_input_token_cost": 5e-07,
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 100000,
|
|
"max_tokens": 200000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8e-06
|
|
},
|
|
"vercel_ai_gateway/openai/o3-mini": {
|
|
"cache_creation_input_token_cost": 0.0,
|
|
"cache_read_input_token_cost": 5.5e-07,
|
|
"input_cost_per_token": 1.1e-06,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 100000,
|
|
"max_tokens": 200000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4.4e-06
|
|
},
|
|
"vercel_ai_gateway/openai/o4-mini": {
|
|
"cache_creation_input_token_cost": 0.0,
|
|
"cache_read_input_token_cost": 2.75e-07,
|
|
"input_cost_per_token": 1.1e-06,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 100000,
|
|
"max_tokens": 200000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4.4e-06
|
|
},
|
|
"vercel_ai_gateway/openai/text-embedding-3-large": {
|
|
"input_cost_per_token": 1.3e-07,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 0,
|
|
"max_output_tokens": 0,
|
|
"max_tokens": 0,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"vercel_ai_gateway/openai/text-embedding-3-small": {
|
|
"input_cost_per_token": 2e-08,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 0,
|
|
"max_output_tokens": 0,
|
|
"max_tokens": 0,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"vercel_ai_gateway/openai/text-embedding-ada-002": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 0,
|
|
"max_output_tokens": 0,
|
|
"max_tokens": 0,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"vercel_ai_gateway/perplexity/sonar": {
|
|
"input_cost_per_token": 1e-06,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 127000,
|
|
"max_output_tokens": 8000,
|
|
"max_tokens": 127000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-06
|
|
},
|
|
"vercel_ai_gateway/perplexity/sonar-pro": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 8000,
|
|
"max_tokens": 200000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05
|
|
},
|
|
"vercel_ai_gateway/perplexity/sonar-reasoning": {
|
|
"input_cost_per_token": 1e-06,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 127000,
|
|
"max_output_tokens": 8000,
|
|
"max_tokens": 127000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5e-06
|
|
},
|
|
"vercel_ai_gateway/perplexity/sonar-reasoning-pro": {
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 127000,
|
|
"max_output_tokens": 8000,
|
|
"max_tokens": 127000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8e-06
|
|
},
|
|
"vercel_ai_gateway/vercel/v0-1.0-md": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 32000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05
|
|
},
|
|
"vercel_ai_gateway/vercel/v0-1.5-md": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05
|
|
},
|
|
"vercel_ai_gateway/xai/grok-2": {
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 4000,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05
|
|
},
|
|
"vercel_ai_gateway/xai/grok-2-vision": {
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05
|
|
},
|
|
"vercel_ai_gateway/xai/grok-3": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05
|
|
},
|
|
"vercel_ai_gateway/xai/grok-3-fast": {
|
|
"input_cost_per_token": 5e-06,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.5e-05
|
|
},
|
|
"vercel_ai_gateway/xai/grok-3-mini": {
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5e-07
|
|
},
|
|
"vercel_ai_gateway/xai/grok-3-mini-fast": {
|
|
"input_cost_per_token": 6e-07,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-06
|
|
},
|
|
"vercel_ai_gateway/xai/grok-4": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 256000,
|
|
"max_output_tokens": 256000,
|
|
"max_tokens": 256000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05
|
|
},
|
|
"vercel_ai_gateway/zai/glm-4.5": {
|
|
"input_cost_per_token": 6e-07,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.2e-06
|
|
},
|
|
"vercel_ai_gateway/zai/glm-4.5-air": {
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "vercel_ai_gateway",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 96000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.1e-06
|
|
},
"vercel_ai_gateway/zai/glm-4.6": {
"cache_read_input_token_cost": 1.1e-07,
"input_cost_per_token": 4.5e-07,
"litellm_provider": "vercel_ai_gateway",
"max_input_tokens": 200000,
"max_output_tokens": 200000,
"max_tokens": 200000,
"mode": "chat",
"output_cost_per_token": 1.8e-06,
"source": "https://vercel.com/ai-gateway/models/glm-4.6",
"supports_function_calling": true,
"supports_parallel_function_calling": true,
"supports_tool_choice": true
},
"vertex_ai/chirp": {
"input_cost_per_character": 3e-05,
"litellm_provider": "vertex_ai",
"mode": "audio_speech",
"source": "https://cloud.google.com/text-to-speech/pricing",
"supported_endpoints": [
"/v1/audio/speech"
]
},
|
|
"vertex_ai/claude-3-5-haiku": {
|
|
"input_cost_per_token": 1e-06,
|
|
"litellm_provider": "vertex_ai-anthropic_models",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5e-06,
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"vertex_ai/claude-3-5-haiku@20241022": {
|
|
"input_cost_per_token": 1e-06,
|
|
"litellm_provider": "vertex_ai-anthropic_models",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5e-06,
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"vertex_ai/claude-haiku-4-5@20251001": {
|
|
"cache_creation_input_token_cost": 1.25e-06,
|
|
"cache_read_input_token_cost": 1e-07,
|
|
"input_cost_per_token": 1e-06,
|
|
"litellm_provider": "vertex_ai-anthropic_models",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5e-06,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/partner-models/claude/haiku-4-5",
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"vertex_ai/claude-3-5-sonnet": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "vertex_ai-anthropic_models",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"vertex_ai/claude-3-5-sonnet-v2": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "vertex_ai-anthropic_models",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"vertex_ai/claude-3-5-sonnet-v2@20241022": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "vertex_ai-anthropic_models",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"vertex_ai/claude-3-5-sonnet@20240620": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "vertex_ai-anthropic_models",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"vertex_ai/claude-3-7-sonnet@20250219": {
|
|
"cache_creation_input_token_cost": 3.75e-06,
|
|
"cache_read_input_token_cost": 3e-07,
|
|
"deprecation_date": "2025-06-01",
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "vertex_ai-anthropic_models",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 159
|
|
},
|
|
"vertex_ai/claude-3-haiku": {
|
|
"input_cost_per_token": 2.5e-07,
|
|
"litellm_provider": "vertex_ai-anthropic_models",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.25e-06,
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"vertex_ai/claude-3-haiku@20240307": {
|
|
"input_cost_per_token": 2.5e-07,
|
|
"litellm_provider": "vertex_ai-anthropic_models",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.25e-06,
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"vertex_ai/claude-3-opus": {
|
|
"input_cost_per_token": 1.5e-05,
|
|
"litellm_provider": "vertex_ai-anthropic_models",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.5e-05,
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"vertex_ai/claude-3-opus@20240229": {
|
|
"input_cost_per_token": 1.5e-05,
|
|
"litellm_provider": "vertex_ai-anthropic_models",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.5e-05,
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"vertex_ai/claude-3-sonnet": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "vertex_ai-anthropic_models",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"vertex_ai/claude-3-sonnet@20240229": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "vertex_ai-anthropic_models",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"vertex_ai/claude-opus-4": {
|
|
"cache_creation_input_token_cost": 1.875e-05,
|
|
"cache_read_input_token_cost": 1.5e-06,
|
|
"input_cost_per_token": 1.5e-05,
|
|
"litellm_provider": "vertex_ai-anthropic_models",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 32000,
|
|
"max_tokens": 32000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.5e-05,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.01,
|
|
"search_context_size_low": 0.01,
|
|
"search_context_size_medium": 0.01
|
|
},
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 159
|
|
},
|
|
"vertex_ai/claude-opus-4-1": {
|
|
"cache_creation_input_token_cost": 1.875e-05,
|
|
"cache_read_input_token_cost": 1.5e-06,
|
|
"input_cost_per_token": 1.5e-05,
|
|
"input_cost_per_token_batches": 7.5e-06,
|
|
"litellm_provider": "vertex_ai-anthropic_models",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 32000,
|
|
"max_tokens": 32000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.5e-05,
|
|
"output_cost_per_token_batches": 3.75e-05,
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"vertex_ai/claude-opus-4-1@20250805": {
|
|
"cache_creation_input_token_cost": 1.875e-05,
|
|
"cache_read_input_token_cost": 1.5e-06,
|
|
"input_cost_per_token": 1.5e-05,
|
|
"input_cost_per_token_batches": 7.5e-06,
|
|
"litellm_provider": "vertex_ai-anthropic_models",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 32000,
|
|
"max_tokens": 32000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.5e-05,
|
|
"output_cost_per_token_batches": 3.75e-05,
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"vertex_ai/claude-opus-4-5": {
|
|
"cache_creation_input_token_cost": 6.25e-06,
|
|
"cache_read_input_token_cost": 5e-07,
|
|
"input_cost_per_token": 5e-06,
|
|
"litellm_provider": "vertex_ai-anthropic_models",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 64000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.5e-05,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.01,
|
|
"search_context_size_low": 0.01,
|
|
"search_context_size_medium": 0.01
|
|
},
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 159
|
|
},
|
|
"vertex_ai/claude-opus-4-5@20251101": {
|
|
"cache_creation_input_token_cost": 6.25e-06,
|
|
"cache_read_input_token_cost": 5e-07,
|
|
"input_cost_per_token": 5e-06,
|
|
"litellm_provider": "vertex_ai-anthropic_models",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 64000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.5e-05,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.01,
|
|
"search_context_size_low": 0.01,
|
|
"search_context_size_medium": 0.01
|
|
},
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 159
|
|
},
|
|
"vertex_ai/claude-sonnet-4-5": {
|
|
"cache_creation_input_token_cost": 3.75e-06,
|
|
"cache_read_input_token_cost": 3e-07,
|
|
"input_cost_per_token": 3e-06,
|
|
"input_cost_per_token_above_200k_tokens": 6e-06,
|
|
"output_cost_per_token_above_200k_tokens": 2.25e-05,
|
|
"cache_creation_input_token_cost_above_200k_tokens": 7.5e-06,
|
|
"cache_read_input_token_cost_above_200k_tokens": 6e-07,
|
|
"input_cost_per_token_batches": 1.5e-06,
|
|
"litellm_provider": "vertex_ai-anthropic_models",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 64000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"output_cost_per_token_batches": 7.5e-06,
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"vertex_ai/claude-sonnet-4-5@20250929": {
|
|
"cache_creation_input_token_cost": 3.75e-06,
|
|
"cache_read_input_token_cost": 3e-07,
|
|
"input_cost_per_token": 3e-06,
|
|
"input_cost_per_token_above_200k_tokens": 6e-06,
|
|
"output_cost_per_token_above_200k_tokens": 2.25e-05,
|
|
"cache_creation_input_token_cost_above_200k_tokens": 7.5e-06,
|
|
"cache_read_input_token_cost_above_200k_tokens": 6e-07,
|
|
"input_cost_per_token_batches": 1.5e-06,
|
|
"litellm_provider": "vertex_ai-anthropic_models",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 64000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"output_cost_per_token_batches": 7.5e-06,
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"vertex_ai/claude-opus-4@20250514": {
|
|
"cache_creation_input_token_cost": 1.875e-05,
|
|
"cache_read_input_token_cost": 1.5e-06,
|
|
"input_cost_per_token": 1.5e-05,
|
|
"litellm_provider": "vertex_ai-anthropic_models",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 32000,
|
|
"max_tokens": 32000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7.5e-05,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.01,
|
|
"search_context_size_low": 0.01,
|
|
"search_context_size_medium": 0.01
|
|
},
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 159
|
|
},
|
|
"vertex_ai/claude-sonnet-4": {
|
|
"cache_creation_input_token_cost": 3.75e-06,
|
|
"cache_read_input_token_cost": 3e-07,
|
|
"input_cost_per_token": 3e-06,
|
|
"input_cost_per_token_above_200k_tokens": 6e-06,
|
|
"output_cost_per_token_above_200k_tokens": 2.25e-05,
|
|
"cache_creation_input_token_cost_above_200k_tokens": 7.5e-06,
|
|
"cache_read_input_token_cost_above_200k_tokens": 6e-07,
|
|
"litellm_provider": "vertex_ai-anthropic_models",
|
|
"max_input_tokens": 1000000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 64000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.01,
|
|
"search_context_size_low": 0.01,
|
|
"search_context_size_medium": 0.01
|
|
},
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 159
|
|
},
|
|
"vertex_ai/claude-sonnet-4@20250514": {
|
|
"cache_creation_input_token_cost": 3.75e-06,
|
|
"cache_read_input_token_cost": 3e-07,
|
|
"input_cost_per_token": 3e-06,
|
|
"input_cost_per_token_above_200k_tokens": 6e-06,
|
|
"output_cost_per_token_above_200k_tokens": 2.25e-05,
|
|
"cache_creation_input_token_cost_above_200k_tokens": 7.5e-06,
|
|
"cache_read_input_token_cost_above_200k_tokens": 6e-07,
|
|
"litellm_provider": "vertex_ai-anthropic_models",
|
|
"max_input_tokens": 1000000,
|
|
"max_output_tokens": 64000,
|
|
"max_tokens": 64000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"search_context_cost_per_query": {
|
|
"search_context_size_high": 0.01,
|
|
"search_context_size_low": 0.01,
|
|
"search_context_size_medium": 0.01
|
|
},
|
|
"supports_assistant_prefill": true,
|
|
"supports_computer_use": true,
|
|
"supports_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"tool_use_system_prompt_tokens": 159
|
|
},
|
|
"vertex_ai/mistralai/codestral-2@001": {
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "vertex_ai-mistral_models",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 9e-07,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"vertex_ai/codestral-2": {
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "vertex_ai-mistral_models",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 9e-07,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"vertex_ai/codestral-2@001": {
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "vertex_ai-mistral_models",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 9e-07,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"vertex_ai/mistralai/codestral-2": {
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "vertex_ai-mistral_models",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 9e-07,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"vertex_ai/codestral-2501": {
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "vertex_ai-mistral_models",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-07,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"vertex_ai/codestral@2405": {
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "vertex_ai-mistral_models",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-07,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"vertex_ai/codestral@latest": {
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "vertex_ai-mistral_models",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-07,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"vertex_ai/deepseek-ai/deepseek-v3.1-maas": {
|
|
"input_cost_per_token": 1.35e-06,
|
|
"litellm_provider": "vertex_ai-deepseek_models",
|
|
"max_input_tokens": 163840,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 163840,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5.4e-06,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models",
|
|
"supported_regions": [
|
|
"us-west2"
|
|
],
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"vertex_ai/deepseek-ai/deepseek-r1-0528-maas": {
|
|
"input_cost_per_token": 1.35e-06,
|
|
"litellm_provider": "vertex_ai-deepseek_models",
|
|
"max_input_tokens": 65336,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5.4e-06,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models",
|
|
"supports_assistant_prefill": true,
|
|
"supports_function_calling": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"vertex_ai/gemini-2.5-flash-image": {
|
|
"cache_read_input_token_cost": 3e-08,
|
|
"input_cost_per_audio_token": 1e-06,
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "vertex_ai-language-models",
|
|
"max_audio_length_hours": 8.4,
|
|
"max_audio_per_prompt": 1,
|
|
"max_images_per_prompt": 3000,
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"max_pdf_size_mb": 30,
|
|
"max_video_length": 1,
|
|
"max_videos_per_prompt": 10,
|
|
"mode": "image_generation",
|
|
"output_cost_per_image": 0.039,
|
|
"output_cost_per_reasoning_token": 2.5e-06,
|
|
"output_cost_per_token": 2.5e-06,
|
|
"rpm": 100000,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/image-generation#edit-an-image",
|
|
"supported_endpoints": [
|
|
"/v1/chat/completions",
|
|
"/v1/completions",
|
|
"/v1/batch"
|
|
],
|
|
"supported_modalities": [
|
|
"text",
|
|
"image",
|
|
"audio",
|
|
"video"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supports_audio_output": false,
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_pdf_input": true,
|
|
"supports_prompt_caching": true,
|
|
"supports_response_schema": true,
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_url_context": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": false,
|
|
"tpm": 8000000
|
|
},
"vertex_ai/gemini-3-pro-image-preview": {
"input_cost_per_image": 0.0011,
"input_cost_per_token": 2e-06,
"input_cost_per_token_batches": 1e-06,
"litellm_provider": "vertex_ai-language-models",
"max_input_tokens": 65536,
"max_output_tokens": 32768,
"max_tokens": 65536,
"mode": "image_generation",
"output_cost_per_image": 0.134,
"output_cost_per_image_token": 1.2e-04,
"output_cost_per_token": 1.2e-05,
"output_cost_per_token_batches": 6e-06,
"source": "https://docs.cloud.google.com/vertex-ai/generative-ai/docs/models/gemini/3-pro-image"
},
"vertex_ai/imagegeneration@006": {
"litellm_provider": "vertex_ai-image-models",
"mode": "image_generation",
"output_cost_per_image": 0.02,
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing"
},
"vertex_ai/imagen-3.0-fast-generate-001": {
"litellm_provider": "vertex_ai-image-models",
"mode": "image_generation",
"output_cost_per_image": 0.02,
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing"
},
"vertex_ai/imagen-3.0-generate-001": {
"litellm_provider": "vertex_ai-image-models",
"mode": "image_generation",
"output_cost_per_image": 0.04,
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing"
},
"vertex_ai/imagen-3.0-generate-002": {
"litellm_provider": "vertex_ai-image-models",
"mode": "image_generation",
"output_cost_per_image": 0.04,
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing"
},
"vertex_ai/imagen-3.0-capability-001": {
"litellm_provider": "vertex_ai-image-models",
"mode": "image_generation",
"output_cost_per_image": 0.04,
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/image/edit-insert-objects"
},
"vertex_ai/imagen-4.0-fast-generate-001": {
"litellm_provider": "vertex_ai-image-models",
"mode": "image_generation",
"output_cost_per_image": 0.02,
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing"
},
"vertex_ai/imagen-4.0-generate-001": {
"litellm_provider": "vertex_ai-image-models",
"mode": "image_generation",
"output_cost_per_image": 0.04,
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing"
},
"vertex_ai/imagen-4.0-ultra-generate-001": {
"litellm_provider": "vertex_ai-image-models",
"mode": "image_generation",
"output_cost_per_image": 0.06,
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing"
},
|
|
"vertex_ai/jamba-1.5": {
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "vertex_ai-ai21_models",
|
|
"max_input_tokens": 256000,
|
|
"max_output_tokens": 256000,
|
|
"max_tokens": 256000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-07,
|
|
"supports_tool_choice": true
|
|
},
|
|
"vertex_ai/jamba-1.5-large": {
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "vertex_ai-ai21_models",
|
|
"max_input_tokens": 256000,
|
|
"max_output_tokens": 256000,
|
|
"max_tokens": 256000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8e-06,
|
|
"supports_tool_choice": true
|
|
},
|
|
"vertex_ai/jamba-1.5-large@001": {
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "vertex_ai-ai21_models",
|
|
"max_input_tokens": 256000,
|
|
"max_output_tokens": 256000,
|
|
"max_tokens": 256000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 8e-06,
|
|
"supports_tool_choice": true
|
|
},
|
|
"vertex_ai/jamba-1.5-mini": {
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "vertex_ai-ai21_models",
|
|
"max_input_tokens": 256000,
|
|
"max_output_tokens": 256000,
|
|
"max_tokens": 256000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-07,
|
|
"supports_tool_choice": true
|
|
},
|
|
"vertex_ai/jamba-1.5-mini@001": {
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "vertex_ai-ai21_models",
|
|
"max_input_tokens": 256000,
|
|
"max_output_tokens": 256000,
|
|
"max_tokens": 256000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-07,
|
|
"supports_tool_choice": true
|
|
},
|
|
"vertex_ai/meta/llama-3.1-405b-instruct-maas": {
|
|
"input_cost_per_token": 5e-06,
|
|
"litellm_provider": "vertex_ai-llama_models",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 2048,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.6e-05,
|
|
"source": "https://console.cloud.google.com/vertex-ai/publishers/meta/model-garden/llama-3.2-90b-vision-instruct-maas",
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"vertex_ai/meta/llama-3.1-70b-instruct-maas": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "vertex_ai-llama_models",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 2048,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0,
|
|
"source": "https://console.cloud.google.com/vertex-ai/publishers/meta/model-garden/llama-3.2-90b-vision-instruct-maas",
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"vertex_ai/meta/llama-3.1-8b-instruct-maas": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "vertex_ai-llama_models",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 2048,
|
|
"max_tokens": 128000,
|
|
"metadata": {
|
|
"notes": "VertexAI states that The Llama 3.1 API service for llama-3.1-70b-instruct-maas and llama-3.1-8b-instruct-maas are in public preview and at no cost."
|
|
},
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0,
|
|
"source": "https://console.cloud.google.com/vertex-ai/publishers/meta/model-garden/llama-3.2-90b-vision-instruct-maas",
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"vertex_ai/meta/llama-3.2-90b-vision-instruct-maas": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "vertex_ai-llama_models",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 2048,
|
|
"max_tokens": 128000,
|
|
"metadata": {
|
|
"notes": "VertexAI states that The Llama 3.2 API service is at no cost during public preview, and will be priced as per dollar-per-1M-tokens at GA."
|
|
},
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0,
|
|
"source": "https://console.cloud.google.com/vertex-ai/publishers/meta/model-garden/llama-3.2-90b-vision-instruct-maas",
|
|
"supports_system_messages": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"vertex_ai/meta/llama-4-maverick-17b-128e-instruct-maas": {
|
|
"input_cost_per_token": 3.5e-07,
|
|
"litellm_provider": "vertex_ai-llama_models",
|
|
"max_input_tokens": 1000000,
|
|
"max_output_tokens": 1000000,
|
|
"max_tokens": 1000000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.15e-06,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models",
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"code"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"vertex_ai/meta/llama-4-maverick-17b-16e-instruct-maas": {
|
|
"input_cost_per_token": 3.5e-07,
|
|
"litellm_provider": "vertex_ai-llama_models",
|
|
"max_input_tokens": 1000000,
|
|
"max_output_tokens": 1000000,
|
|
"max_tokens": 1000000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.15e-06,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models",
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"code"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"vertex_ai/meta/llama-4-scout-17b-128e-instruct-maas": {
|
|
"input_cost_per_token": 2.5e-07,
|
|
"litellm_provider": "vertex_ai-llama_models",
|
|
"max_input_tokens": 10000000,
|
|
"max_output_tokens": 10000000,
|
|
"max_tokens": 10000000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7e-07,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models",
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"code"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"vertex_ai/meta/llama-4-scout-17b-16e-instruct-maas": {
|
|
"input_cost_per_token": 2.5e-07,
|
|
"litellm_provider": "vertex_ai-llama_models",
|
|
"max_input_tokens": 10000000,
|
|
"max_output_tokens": 10000000,
|
|
"max_tokens": 10000000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 7e-07,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models",
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"text",
|
|
"code"
|
|
],
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"vertex_ai/meta/llama3-405b-instruct-maas": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "vertex_ai-llama_models",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 32000,
|
|
"max_tokens": 32000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models",
|
|
"supports_tool_choice": true
|
|
},
|
|
"vertex_ai/meta/llama3-70b-instruct-maas": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "vertex_ai-llama_models",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 32000,
|
|
"max_tokens": 32000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models",
|
|
"supports_tool_choice": true
|
|
},
|
|
"vertex_ai/meta/llama3-8b-instruct-maas": {
|
|
"input_cost_per_token": 0.0,
|
|
"litellm_provider": "vertex_ai-llama_models",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 32000,
|
|
"max_tokens": 32000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.0,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models",
|
|
"supports_tool_choice": true
|
|
},
|
|
"vertex_ai/minimaxai/minimax-m2-maas": {
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "vertex_ai-minimax_models",
|
|
"max_input_tokens": 196608,
|
|
"max_output_tokens": 196608,
|
|
"max_tokens": 196608,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.2e-06,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"vertex_ai/moonshotai/kimi-k2-thinking-maas": {
|
|
"input_cost_per_token": 6e-07,
|
|
"litellm_provider": "vertex_ai-moonshot_models",
|
|
"max_input_tokens": 256000,
|
|
"max_output_tokens": 256000,
|
|
"max_tokens": 256000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.5e-06,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_web_search": true
|
|
},
|
|
"vertex_ai/mistral-medium-3": {
|
|
"input_cost_per_token": 4e-07,
|
|
"litellm_provider": "vertex_ai-mistral_models",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-06,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"vertex_ai/mistral-medium-3@001": {
|
|
"input_cost_per_token": 4e-07,
|
|
"litellm_provider": "vertex_ai-mistral_models",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-06,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"vertex_ai/mistralai/mistral-medium-3": {
|
|
"input_cost_per_token": 4e-07,
|
|
"litellm_provider": "vertex_ai-mistral_models",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-06,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"vertex_ai/mistralai/mistral-medium-3@001": {
|
|
"input_cost_per_token": 4e-07,
|
|
"litellm_provider": "vertex_ai-mistral_models",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2e-06,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"vertex_ai/mistral-large-2411": {
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "vertex_ai-mistral_models",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-06,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"vertex_ai/mistral-large@2407": {
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "vertex_ai-mistral_models",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-06,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"vertex_ai/mistral-large@2411-001": {
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "vertex_ai-mistral_models",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-06,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"vertex_ai/mistral-large@latest": {
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "vertex_ai-mistral_models",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-06,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"vertex_ai/mistral-nemo@2407": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "vertex_ai-mistral_models",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-06,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"vertex_ai/mistral-nemo@latest": {
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "vertex_ai-mistral_models",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-07,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"vertex_ai/mistral-small-2503": {
|
|
"input_cost_per_token": 1e-06,
|
|
"litellm_provider": "vertex_ai-mistral_models",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"max_tokens": 128000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-06,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true
|
|
},
|
|
"vertex_ai/mistral-small-2503@001": {
|
|
"input_cost_per_token": 1e-06,
|
|
"litellm_provider": "vertex_ai-mistral_models",
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 8191,
|
|
"max_tokens": 8191,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-06,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
"vertex_ai/mistral-ocr-2505": {
"litellm_provider": "vertex_ai",
"mode": "ocr",
"ocr_cost_per_page": 5e-04,
"source": "https://cloud.google.com/generative-ai-app-builder/pricing",
"supported_endpoints": [
"/v1/ocr"
]
},
|
|
"vertex_ai/openai/gpt-oss-120b-maas": {
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "vertex_ai-openai_models",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 6e-07,
|
|
"source": "https://console.cloud.google.com/vertex-ai/publishers/openai/model-garden/gpt-oss-120b-maas",
|
|
"supports_reasoning": true
|
|
},
|
|
"vertex_ai/openai/gpt-oss-20b-maas": {
|
|
"input_cost_per_token": 7.5e-08,
|
|
"litellm_provider": "vertex_ai-openai_models",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 3e-07,
|
|
"source": "https://console.cloud.google.com/vertex-ai/publishers/openai/model-garden/gpt-oss-120b-maas",
|
|
"supports_reasoning": true
|
|
},
|
|
"vertex_ai/qwen/qwen3-235b-a22b-instruct-2507-maas": {
|
|
"input_cost_per_token": 2.5e-07,
|
|
"litellm_provider": "vertex_ai-qwen_models",
|
|
"max_input_tokens": 262144,
|
|
"max_output_tokens": 16384,
|
|
"max_tokens": 16384,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-06,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"vertex_ai/qwen/qwen3-coder-480b-a35b-instruct-maas": {
|
|
"input_cost_per_token": 1e-06,
|
|
"litellm_provider": "vertex_ai-qwen_models",
|
|
"max_input_tokens": 262144,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-06,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"vertex_ai/qwen/qwen3-next-80b-a3b-instruct-maas": {
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "vertex_ai-qwen_models",
|
|
"max_input_tokens": 262144,
|
|
"max_output_tokens": 262144,
|
|
"max_tokens": 262144,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.2e-06,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"vertex_ai/qwen/qwen3-next-80b-a3b-thinking-maas": {
|
|
"input_cost_per_token": 1.5e-07,
|
|
"litellm_provider": "vertex_ai-qwen_models",
|
|
"max_input_tokens": 262144,
|
|
"max_output_tokens": 262144,
|
|
"max_tokens": 262144,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.2e-06,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"vertex_ai/veo-2.0-generate-001": {
|
|
"litellm_provider": "vertex_ai-video-models",
|
|
"max_input_tokens": 1024,
|
|
"max_tokens": 1024,
|
|
"mode": "video_generation",
|
|
"output_cost_per_second": 0.35,
|
|
"source": "https://ai.google.dev/gemini-api/docs/video",
|
|
"supported_modalities": [
|
|
"text"
|
|
],
|
|
"supported_output_modalities": [
|
|
"video"
|
|
]
|
|
},
|
|
"vertex_ai/veo-3.0-fast-generate-preview": {
|
|
"litellm_provider": "vertex_ai-video-models",
|
|
"max_input_tokens": 1024,
|
|
"max_tokens": 1024,
|
|
"mode": "video_generation",
|
|
"output_cost_per_second": 0.15,
|
|
"source": "https://ai.google.dev/gemini-api/docs/video",
|
|
"supported_modalities": [
|
|
"text"
|
|
],
|
|
"supported_output_modalities": [
|
|
"video"
|
|
]
|
|
},
|
|
"vertex_ai/veo-3.0-generate-preview": {
|
|
"litellm_provider": "vertex_ai-video-models",
|
|
"max_input_tokens": 1024,
|
|
"max_tokens": 1024,
|
|
"mode": "video_generation",
|
|
"output_cost_per_second": 0.4,
|
|
"source": "https://ai.google.dev/gemini-api/docs/video",
|
|
"supported_modalities": [
|
|
"text"
|
|
],
|
|
"supported_output_modalities": [
|
|
"video"
|
|
]
|
|
},
|
|
"vertex_ai/veo-3.0-fast-generate-001": {
|
|
"litellm_provider": "vertex_ai-video-models",
|
|
"max_input_tokens": 1024,
|
|
"max_tokens": 1024,
|
|
"mode": "video_generation",
|
|
"output_cost_per_second": 0.15,
|
|
"source": "https://ai.google.dev/gemini-api/docs/video",
|
|
"supported_modalities": [
|
|
"text"
|
|
],
|
|
"supported_output_modalities": [
|
|
"video"
|
|
]
|
|
},
|
|
"vertex_ai/veo-3.0-generate-001": {
|
|
"litellm_provider": "vertex_ai-video-models",
|
|
"max_input_tokens": 1024,
|
|
"max_tokens": 1024,
|
|
"mode": "video_generation",
|
|
"output_cost_per_second": 0.4,
|
|
"source": "https://ai.google.dev/gemini-api/docs/video",
|
|
"supported_modalities": [
|
|
"text"
|
|
],
|
|
"supported_output_modalities": [
|
|
"video"
|
|
]
|
|
},
|
|
"vertex_ai/veo-3.1-generate-preview": {
|
|
"litellm_provider": "vertex_ai-video-models",
|
|
"max_input_tokens": 1024,
|
|
"max_tokens": 1024,
|
|
"mode": "video_generation",
|
|
"output_cost_per_second": 0.4,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/model-reference/veo",
|
|
"supported_modalities": [
|
|
"text"
|
|
],
|
|
"supported_output_modalities": [
|
|
"video"
|
|
]
|
|
},
|
|
"vertex_ai/veo-3.1-fast-generate-preview": {
|
|
"litellm_provider": "vertex_ai-video-models",
|
|
"max_input_tokens": 1024,
|
|
"max_tokens": 1024,
|
|
"mode": "video_generation",
|
|
"output_cost_per_second": 0.15,
|
|
"source": "https://cloud.google.com/vertex-ai/generative-ai/docs/model-reference/veo",
|
|
"supported_modalities": [
|
|
"text"
|
|
],
|
|
"supported_output_modalities": [
|
|
"video"
|
|
]
|
|
},
|
|
"voyage/rerank-2": {
|
|
"input_cost_per_query": 5e-08,
|
|
"input_cost_per_token": 5e-08,
|
|
"litellm_provider": "voyage",
|
|
"max_input_tokens": 16000,
|
|
"max_output_tokens": 16000,
|
|
"max_query_tokens": 16000,
|
|
"max_tokens": 16000,
|
|
"mode": "rerank",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"voyage/rerank-2-lite": {
|
|
"input_cost_per_query": 2e-08,
|
|
"input_cost_per_token": 2e-08,
|
|
"litellm_provider": "voyage",
|
|
"max_input_tokens": 8000,
|
|
"max_output_tokens": 8000,
|
|
"max_query_tokens": 8000,
|
|
"max_tokens": 8000,
|
|
"mode": "rerank",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"voyage/voyage-2": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "voyage",
|
|
"max_input_tokens": 4000,
|
|
"max_tokens": 4000,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"voyage/voyage-3": {
|
|
"input_cost_per_token": 6e-08,
|
|
"litellm_provider": "voyage",
|
|
"max_input_tokens": 32000,
|
|
"max_tokens": 32000,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"voyage/voyage-3-large": {
|
|
"input_cost_per_token": 1.8e-07,
|
|
"litellm_provider": "voyage",
|
|
"max_input_tokens": 32000,
|
|
"max_tokens": 32000,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"voyage/voyage-3-lite": {
|
|
"input_cost_per_token": 2e-08,
|
|
"litellm_provider": "voyage",
|
|
"max_input_tokens": 32000,
|
|
"max_tokens": 32000,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"voyage/voyage-3.5": {
|
|
"input_cost_per_token": 6e-08,
|
|
"litellm_provider": "voyage",
|
|
"max_input_tokens": 32000,
|
|
"max_tokens": 32000,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"voyage/voyage-3.5-lite": {
|
|
"input_cost_per_token": 2e-08,
|
|
"litellm_provider": "voyage",
|
|
"max_input_tokens": 32000,
|
|
"max_tokens": 32000,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"voyage/voyage-code-2": {
|
|
"input_cost_per_token": 1.2e-07,
|
|
"litellm_provider": "voyage",
|
|
"max_input_tokens": 16000,
|
|
"max_tokens": 16000,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"voyage/voyage-code-3": {
|
|
"input_cost_per_token": 1.8e-07,
|
|
"litellm_provider": "voyage",
|
|
"max_input_tokens": 32000,
|
|
"max_tokens": 32000,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"voyage/voyage-context-3": {
|
|
"input_cost_per_token": 1.8e-07,
|
|
"litellm_provider": "voyage",
|
|
"max_input_tokens": 120000,
|
|
"max_tokens": 120000,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"voyage/voyage-finance-2": {
|
|
"input_cost_per_token": 1.2e-07,
|
|
"litellm_provider": "voyage",
|
|
"max_input_tokens": 32000,
|
|
"max_tokens": 32000,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"voyage/voyage-large-2": {
|
|
"input_cost_per_token": 1.2e-07,
|
|
"litellm_provider": "voyage",
|
|
"max_input_tokens": 16000,
|
|
"max_tokens": 16000,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"voyage/voyage-law-2": {
|
|
"input_cost_per_token": 1.2e-07,
|
|
"litellm_provider": "voyage",
|
|
"max_input_tokens": 16000,
|
|
"max_tokens": 16000,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"voyage/voyage-lite-01": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "voyage",
|
|
"max_input_tokens": 4096,
|
|
"max_tokens": 4096,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"voyage/voyage-lite-02-instruct": {
|
|
"input_cost_per_token": 1e-07,
|
|
"litellm_provider": "voyage",
|
|
"max_input_tokens": 4000,
|
|
"max_tokens": 4000,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"voyage/voyage-multimodal-3": {
|
|
"input_cost_per_token": 1.2e-07,
|
|
"litellm_provider": "voyage",
|
|
"max_input_tokens": 32000,
|
|
"max_tokens": 32000,
|
|
"mode": "embedding",
|
|
"output_cost_per_token": 0.0
|
|
},
|
|
"wandb/openai/gpt-oss-120b": {
|
|
"max_tokens": 131072,
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"input_cost_per_token": 0.015,
|
|
"output_cost_per_token": 0.06,
|
|
"litellm_provider": "wandb",
|
|
"mode": "chat"
|
|
},
|
|
"wandb/openai/gpt-oss-20b": {
|
|
"max_tokens": 131072,
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"input_cost_per_token": 0.005,
|
|
"output_cost_per_token": 0.02,
|
|
"litellm_provider": "wandb",
|
|
"mode": "chat"
|
|
},
|
|
"wandb/zai-org/GLM-4.5": {
|
|
"max_tokens": 131072,
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"input_cost_per_token": 0.055,
|
|
"output_cost_per_token": 0.2,
|
|
"litellm_provider": "wandb",
|
|
"mode": "chat"
|
|
},
|
|
"wandb/Qwen/Qwen3-235B-A22B-Instruct-2507": {
|
|
"max_tokens": 262144,
|
|
"max_input_tokens": 262144,
|
|
"max_output_tokens": 262144,
|
|
"input_cost_per_token": 0.01,
|
|
"output_cost_per_token": 0.01,
|
|
"litellm_provider": "wandb",
|
|
"mode": "chat"
|
|
},
|
|
"wandb/Qwen/Qwen3-Coder-480B-A35B-Instruct": {
|
|
"max_tokens": 262144,
|
|
"max_input_tokens": 262144,
|
|
"max_output_tokens": 262144,
|
|
"input_cost_per_token": 0.1,
|
|
"output_cost_per_token": 0.15,
|
|
"litellm_provider": "wandb",
|
|
"mode": "chat"
|
|
},
|
|
"wandb/Qwen/Qwen3-235B-A22B-Thinking-2507": {
|
|
"max_tokens": 262144,
|
|
"max_input_tokens": 262144,
|
|
"max_output_tokens": 262144,
|
|
"input_cost_per_token": 0.01,
|
|
"output_cost_per_token": 0.01,
|
|
"litellm_provider": "wandb",
|
|
"mode": "chat"
|
|
},
|
|
"wandb/moonshotai/Kimi-K2-Instruct": {
|
|
"max_tokens": 128000,
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"input_cost_per_token": 6e-07,
|
|
"output_cost_per_token": 2.5e-06,
|
|
"litellm_provider": "wandb",
|
|
"mode": "chat"
|
|
},
|
|
"wandb/meta-llama/Llama-3.1-8B-Instruct": {
|
|
"max_tokens": 128000,
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"input_cost_per_token": 0.022,
|
|
"output_cost_per_token": 0.022,
|
|
"litellm_provider": "wandb",
|
|
"mode": "chat"
|
|
},
|
|
"wandb/deepseek-ai/DeepSeek-V3.1": {
|
|
"max_tokens": 128000,
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"input_cost_per_token": 0.055,
|
|
"output_cost_per_token": 0.165,
|
|
"litellm_provider": "wandb",
|
|
"mode": "chat"
|
|
},
|
|
"wandb/deepseek-ai/DeepSeek-R1-0528": {
|
|
"max_tokens": 161000,
|
|
"max_input_tokens": 161000,
|
|
"max_output_tokens": 161000,
|
|
"input_cost_per_token": 0.135,
|
|
"output_cost_per_token": 0.54,
|
|
"litellm_provider": "wandb",
|
|
"mode": "chat"
|
|
},
|
|
"wandb/deepseek-ai/DeepSeek-V3-0324": {
|
|
"max_tokens": 161000,
|
|
"max_input_tokens": 161000,
|
|
"max_output_tokens": 161000,
|
|
"input_cost_per_token": 0.114,
|
|
"output_cost_per_token": 0.275,
|
|
"litellm_provider": "wandb",
|
|
"mode": "chat"
|
|
},
|
|
"wandb/meta-llama/Llama-3.3-70B-Instruct": {
|
|
"max_tokens": 128000,
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"input_cost_per_token": 0.071,
|
|
"output_cost_per_token": 0.071,
|
|
"litellm_provider": "wandb",
|
|
"mode": "chat"
|
|
},
|
|
"wandb/meta-llama/Llama-4-Scout-17B-16E-Instruct": {
|
|
"max_tokens": 64000,
|
|
"max_input_tokens": 64000,
|
|
"max_output_tokens": 64000,
|
|
"input_cost_per_token": 0.017,
|
|
"output_cost_per_token": 0.066,
|
|
"litellm_provider": "wandb",
|
|
"mode": "chat"
|
|
},
|
|
"wandb/microsoft/Phi-4-mini-instruct": {
|
|
"max_tokens": 128000,
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"input_cost_per_token": 0.008,
|
|
"output_cost_per_token": 0.035,
|
|
"litellm_provider": "wandb",
|
|
"mode": "chat"
|
|
},
"watsonx/ibm/granite-3-8b-instruct": {
"input_cost_per_token": 2e-07,
"litellm_provider": "watsonx",
"max_input_tokens": 8192,
"max_output_tokens": 1024,
"max_tokens": 8192,
"mode": "chat",
"output_cost_per_token": 2e-07,
"supports_audio_input": false,
"supports_audio_output": false,
"supports_function_calling": true,
"supports_parallel_function_calling": false,
"supports_prompt_caching": true,
"supports_response_schema": true,
"supports_system_messages": true,
"supports_tool_choice": true,
"supports_vision": false
},
"watsonx/mistralai/mistral-large": {
"input_cost_per_token": 3e-06,
"litellm_provider": "watsonx",
"max_input_tokens": 131072,
"max_output_tokens": 16384,
"max_tokens": 131072,
"mode": "chat",
"output_cost_per_token": 1e-05,
"supports_audio_input": false,
"supports_audio_output": false,
"supports_function_calling": true,
"supports_parallel_function_calling": false,
"supports_prompt_caching": true,
"supports_response_schema": true,
"supports_system_messages": true,
"supports_tool_choice": true,
"supports_vision": false
},
|
|
"watsonx/bigscience/mt0-xxl-13b": {
|
|
"max_tokens": 8192,
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"input_cost_per_token": 0.0005,
|
|
"output_cost_per_token": 0.002,
|
|
"litellm_provider": "watsonx",
|
|
"mode": "chat",
|
|
"supports_function_calling": false,
|
|
"supports_parallel_function_calling": false,
|
|
"supports_vision": false
|
|
},
|
|
"watsonx/core42/jais-13b-chat": {
|
|
"max_tokens": 8192,
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"input_cost_per_token": 0.0005,
|
|
"output_cost_per_token": 0.002,
|
|
"litellm_provider": "watsonx",
|
|
"mode": "chat",
|
|
"supports_function_calling": false,
|
|
"supports_parallel_function_calling": false,
|
|
"supports_vision": false
|
|
},
|
|
"watsonx/google/flan-t5-xl-3b": {
|
|
"max_tokens": 8192,
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"input_cost_per_token": 0.6e-06,
|
|
"output_cost_per_token": 0.6e-06,
|
|
"litellm_provider": "watsonx",
|
|
"mode": "chat",
|
|
"supports_function_calling": false,
|
|
"supports_parallel_function_calling": false,
|
|
"supports_vision": false
|
|
},
|
|
"watsonx/ibm/granite-13b-chat-v2": {
|
|
"max_tokens": 8192,
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"input_cost_per_token": 0.6e-06,
|
|
"output_cost_per_token": 0.6e-06,
|
|
"litellm_provider": "watsonx",
|
|
"mode": "chat",
|
|
"supports_function_calling": false,
|
|
"supports_parallel_function_calling": false,
|
|
"supports_vision": false
|
|
},
|
|
"watsonx/ibm/granite-13b-instruct-v2": {
|
|
"max_tokens": 8192,
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"input_cost_per_token": 0.6e-06,
|
|
"output_cost_per_token": 0.6e-06,
|
|
"litellm_provider": "watsonx",
|
|
"mode": "chat",
|
|
"supports_function_calling": false,
|
|
"supports_parallel_function_calling": false,
|
|
"supports_vision": false
|
|
},
|
|
"watsonx/ibm/granite-3-3-8b-instruct": {
|
|
"max_tokens": 8192,
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"input_cost_per_token": 0.2e-06,
|
|
"output_cost_per_token": 0.2e-06,
|
|
"litellm_provider": "watsonx",
|
|
"mode": "chat",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_vision": false
|
|
},
|
|
"watsonx/ibm/granite-4-h-small": {
|
|
"max_tokens": 20480,
|
|
"max_input_tokens": 20480,
|
|
"max_output_tokens": 20480,
|
|
"input_cost_per_token": 0.06e-06,
|
|
"output_cost_per_token": 0.25e-06,
|
|
"litellm_provider": "watsonx",
|
|
"mode": "chat",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_vision": false
|
|
},
|
|
"watsonx/ibm/granite-guardian-3-2-2b": {
|
|
"max_tokens": 8192,
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"input_cost_per_token": 0.1e-06,
|
|
"output_cost_per_token": 0.1e-06,
|
|
"litellm_provider": "watsonx",
|
|
"mode": "chat",
|
|
"supports_function_calling": false,
|
|
"supports_parallel_function_calling": false,
|
|
"supports_vision": false
|
|
},
|
|
"watsonx/ibm/granite-guardian-3-3-8b": {
|
|
"max_tokens": 8192,
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"input_cost_per_token": 0.2e-06,
|
|
"output_cost_per_token": 0.2e-06,
|
|
"litellm_provider": "watsonx",
|
|
"mode": "chat",
|
|
"supports_function_calling": false,
|
|
"supports_parallel_function_calling": false,
|
|
"supports_vision": false
|
|
},
|
|
"watsonx/ibm/granite-ttm-1024-96-r2": {
|
|
"max_tokens": 512,
|
|
"max_input_tokens": 512,
|
|
"max_output_tokens": 512,
|
|
"input_cost_per_token": 0.38e-06,
|
|
"output_cost_per_token": 0.38e-06,
|
|
"litellm_provider": "watsonx",
|
|
"mode": "chat",
|
|
"supports_function_calling": false,
|
|
"supports_parallel_function_calling": false,
|
|
"supports_vision": false
|
|
},
|
|
"watsonx/ibm/granite-ttm-1536-96-r2": {
|
|
"max_tokens": 512,
|
|
"max_input_tokens": 512,
|
|
"max_output_tokens": 512,
|
|
"input_cost_per_token": 0.38e-06,
|
|
"output_cost_per_token": 0.38e-06,
|
|
"litellm_provider": "watsonx",
|
|
"mode": "chat",
|
|
"supports_function_calling": false,
|
|
"supports_parallel_function_calling": false,
|
|
"supports_vision": false
|
|
},
|
|
"watsonx/ibm/granite-ttm-512-96-r2": {
|
|
"max_tokens": 512,
|
|
"max_input_tokens": 512,
|
|
"max_output_tokens": 512,
|
|
"input_cost_per_token": 0.38e-06,
|
|
"output_cost_per_token": 0.38e-06,
|
|
"litellm_provider": "watsonx",
|
|
"mode": "chat",
|
|
"supports_function_calling": false,
|
|
"supports_parallel_function_calling": false,
|
|
"supports_vision": false
|
|
},
|
|
"watsonx/ibm/granite-vision-3-2-2b": {
|
|
"max_tokens": 8192,
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"input_cost_per_token": 0.1e-06,
|
|
"output_cost_per_token": 0.1e-06,
|
|
"litellm_provider": "watsonx",
|
|
"mode": "chat",
|
|
"supports_function_calling": false,
|
|
"supports_parallel_function_calling": false,
|
|
"supports_vision": true
|
|
},
|
|
"watsonx/meta-llama/llama-3-2-11b-vision-instruct": {
|
|
"max_tokens": 128000,
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"input_cost_per_token": 0.35e-06,
|
|
"output_cost_per_token": 0.35e-06,
|
|
"litellm_provider": "watsonx",
|
|
"mode": "chat",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_vision": true
|
|
},
|
|
"watsonx/meta-llama/llama-3-2-1b-instruct": {
|
|
"max_tokens": 128000,
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"input_cost_per_token": 0.1e-06,
|
|
"output_cost_per_token": 0.1e-06,
|
|
"litellm_provider": "watsonx",
|
|
"mode": "chat",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_vision": false
|
|
},
|
|
"watsonx/meta-llama/llama-3-2-3b-instruct": {
|
|
"max_tokens": 128000,
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"input_cost_per_token": 0.15e-06,
|
|
"output_cost_per_token": 0.15e-06,
|
|
"litellm_provider": "watsonx",
|
|
"mode": "chat",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_vision": false
|
|
},
|
|
"watsonx/meta-llama/llama-3-2-90b-vision-instruct": {
|
|
"max_tokens": 128000,
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"input_cost_per_token": 2e-06,
|
|
"output_cost_per_token": 2e-06,
|
|
"litellm_provider": "watsonx",
|
|
"mode": "chat",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_vision": true
|
|
},
|
|
"watsonx/meta-llama/llama-3-3-70b-instruct": {
|
|
"max_tokens": 128000,
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"input_cost_per_token": 0.71e-06,
|
|
"output_cost_per_token": 0.71e-06,
|
|
"litellm_provider": "watsonx",
|
|
"mode": "chat",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_vision": false
|
|
},
|
|
"watsonx/meta-llama/llama-4-maverick-17b": {
|
|
"max_tokens": 128000,
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"input_cost_per_token": 0.35e-06,
|
|
"output_cost_per_token": 1.4e-06,
|
|
"litellm_provider": "watsonx",
|
|
"mode": "chat",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_vision": false
|
|
},
|
|
"watsonx/meta-llama/llama-guard-3-11b-vision": {
|
|
"max_tokens": 128000,
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"input_cost_per_token": 0.35e-06,
|
|
"output_cost_per_token": 0.35e-06,
|
|
"litellm_provider": "watsonx",
|
|
"mode": "chat",
|
|
"supports_function_calling": false,
|
|
"supports_parallel_function_calling": false,
|
|
"supports_vision": true
|
|
},
|
|
"watsonx/mistralai/mistral-medium-2505": {
|
|
"max_tokens": 128000,
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"input_cost_per_token": 3e-06,
|
|
"output_cost_per_token": 10e-06,
|
|
"litellm_provider": "watsonx",
|
|
"mode": "chat",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_vision": false
|
|
},
|
|
"watsonx/mistralai/mistral-small-2503": {
|
|
"max_tokens": 32000,
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 32000,
|
|
"input_cost_per_token": 0.1e-06,
|
|
"output_cost_per_token": 0.3e-06,
|
|
"litellm_provider": "watsonx",
|
|
"mode": "chat",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_vision": false
|
|
},
|
|
"watsonx/mistralai/mistral-small-3-1-24b-instruct-2503": {
|
|
"max_tokens": 32000,
|
|
"max_input_tokens": 32000,
|
|
"max_output_tokens": 32000,
|
|
"input_cost_per_token": 0.1e-06,
|
|
"output_cost_per_token": 0.3e-06,
|
|
"litellm_provider": "watsonx",
|
|
"mode": "chat",
|
|
"supports_function_calling": true,
|
|
"supports_parallel_function_calling": true,
|
|
"supports_vision": false
|
|
},
|
|
"watsonx/mistralai/pixtral-12b-2409": {
|
|
"max_tokens": 128000,
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 128000,
|
|
"input_cost_per_token": 0.35e-06,
|
|
"output_cost_per_token": 0.35e-06,
|
|
"litellm_provider": "watsonx",
|
|
"mode": "chat",
|
|
"supports_function_calling": false,
|
|
"supports_parallel_function_calling": false,
|
|
"supports_vision": true
|
|
},
|
|
"watsonx/openai/gpt-oss-120b": {
|
|
"max_tokens": 8192,
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"input_cost_per_token": 0.15e-06,
|
|
"output_cost_per_token": 0.6e-06,
|
|
"litellm_provider": "watsonx",
|
|
"mode": "chat",
|
|
"supports_function_calling": false,
|
|
"supports_parallel_function_calling": false,
|
|
"supports_vision": false
|
|
},
|
|
"watsonx/sdaia/allam-1-13b-instruct": {
|
|
"max_tokens": 8192,
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"input_cost_per_token": 1.8e-06,
|
|
"output_cost_per_token": 1.8e-06,
|
|
"litellm_provider": "watsonx",
|
|
"mode": "chat",
|
|
"supports_function_calling": false,
|
|
"supports_parallel_function_calling": false,
|
|
"supports_vision": false
|
|
},
|
|
"watsonx/whisper-large-v3-turbo": {
|
|
"input_cost_per_second": 0.0001,
|
|
"output_cost_per_second": 0.0001,
|
|
"litellm_provider": "watsonx",
|
|
"mode": "audio_transcription",
|
|
"supported_endpoints": [
|
|
"/v1/audio/transcriptions"
|
|
]
|
|
},
|
|
"whisper-1": {
|
|
"input_cost_per_second": 0.0001,
|
|
"litellm_provider": "openai",
|
|
"mode": "audio_transcription",
|
|
"output_cost_per_second": 0.0001,
|
|
"supported_endpoints": [
|
|
"/v1/audio/transcriptions"
|
|
]
|
|
},
|
|
"xai/grok-2": {
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "xai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_web_search": true
|
|
},
|
|
"xai/grok-2-1212": {
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "xai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_web_search": true
|
|
},
|
|
"xai/grok-2-latest": {
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "xai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_web_search": true
|
|
},
|
|
"xai/grok-2-vision": {
|
|
"input_cost_per_image": 2e-06,
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "xai",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true
|
|
},
|
|
"xai/grok-2-vision-1212": {
|
|
"input_cost_per_image": 2e-06,
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "xai",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true
|
|
},
|
|
"xai/grok-2-vision-latest": {
|
|
"input_cost_per_image": 2e-06,
|
|
"input_cost_per_token": 2e-06,
|
|
"litellm_provider": "xai",
|
|
"max_input_tokens": 32768,
|
|
"max_output_tokens": 32768,
|
|
"max_tokens": 32768,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1e-05,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true
|
|
},
|
|
"xai/grok-3": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "xai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"source": "https://x.ai/api#pricing",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": false,
|
|
"supports_tool_choice": true,
|
|
"supports_web_search": true
|
|
},
|
|
"xai/grok-3-beta": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "xai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"source": "https://x.ai/api#pricing",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": false,
|
|
"supports_tool_choice": true,
|
|
"supports_web_search": true
|
|
},
|
|
"xai/grok-3-fast-beta": {
|
|
"input_cost_per_token": 5e-06,
|
|
"litellm_provider": "xai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.5e-05,
|
|
"source": "https://x.ai/api#pricing",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": false,
|
|
"supports_tool_choice": true,
|
|
"supports_web_search": true
|
|
},
|
|
"xai/grok-3-fast-latest": {
|
|
"input_cost_per_token": 5e-06,
|
|
"litellm_provider": "xai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 2.5e-05,
|
|
"source": "https://x.ai/api#pricing",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": false,
|
|
"supports_tool_choice": true,
|
|
"supports_web_search": true
|
|
},
|
|
"xai/grok-3-latest": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "xai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"source": "https://x.ai/api#pricing",
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": false,
|
|
"supports_tool_choice": true,
|
|
"supports_web_search": true
|
|
},
|
|
"xai/grok-3-mini": {
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "xai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5e-07,
|
|
"source": "https://x.ai/api#pricing",
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": false,
|
|
"supports_tool_choice": true,
|
|
"supports_web_search": true
|
|
},
|
|
"xai/grok-3-mini-beta": {
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "xai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5e-07,
|
|
"source": "https://x.ai/api#pricing",
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": false,
|
|
"supports_tool_choice": true,
|
|
"supports_web_search": true
|
|
},
|
|
"xai/grok-3-mini-fast": {
|
|
"input_cost_per_token": 6e-07,
|
|
"litellm_provider": "xai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-06,
|
|
"source": "https://x.ai/api#pricing",
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": false,
|
|
"supports_tool_choice": true,
|
|
"supports_web_search": true
|
|
},
|
|
"xai/grok-3-mini-fast-beta": {
|
|
"input_cost_per_token": 6e-07,
|
|
"litellm_provider": "xai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-06,
|
|
"source": "https://x.ai/api#pricing",
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": false,
|
|
"supports_tool_choice": true,
|
|
"supports_web_search": true
|
|
},
|
|
"xai/grok-3-mini-fast-latest": {
|
|
"input_cost_per_token": 6e-07,
|
|
"litellm_provider": "xai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 4e-06,
|
|
"source": "https://x.ai/api#pricing",
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": false,
|
|
"supports_tool_choice": true,
|
|
"supports_web_search": true
|
|
},
|
|
"xai/grok-3-mini-latest": {
|
|
"input_cost_per_token": 3e-07,
|
|
"litellm_provider": "xai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 5e-07,
|
|
"source": "https://x.ai/api#pricing",
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": false,
|
|
"supports_tool_choice": true,
|
|
"supports_web_search": true
|
|
},
|
|
"xai/grok-4": {
|
|
"input_cost_per_token": 3e-06,
|
|
"litellm_provider": "xai",
|
|
"max_input_tokens": 256000,
|
|
"max_output_tokens": 256000,
|
|
"max_tokens": 256000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"source": "https://docs.x.ai/docs/models",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_web_search": true
|
|
},
|
|
"xai/grok-4-fast-reasoning": {
|
|
"litellm_provider": "xai",
|
|
"max_input_tokens": 2e6,
|
|
"max_output_tokens": 2e6,
|
|
"max_tokens": 2e6,
|
|
"mode": "chat",
|
|
"input_cost_per_token": 0.2e-06,
|
|
"input_cost_per_token_above_128k_tokens": 0.4e-06,
|
|
"output_cost_per_token": 0.5e-06,
|
|
"output_cost_per_token_above_128k_tokens": 1e-06,
|
|
"cache_read_input_token_cost": 0.05e-06,
|
|
"source": "https://docs.x.ai/docs/models",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_web_search": true
|
|
},
|
|
"xai/grok-4-fast-non-reasoning": {
|
|
"litellm_provider": "xai",
|
|
"max_input_tokens": 2e6,
|
|
"max_output_tokens": 2e6,
|
|
"cache_read_input_token_cost": 0.05e-06,
|
|
"max_tokens": 2e6,
|
|
"mode": "chat",
|
|
"input_cost_per_token": 0.2e-06,
|
|
"input_cost_per_token_above_128k_tokens": 0.4e-06,
|
|
"output_cost_per_token": 0.5e-06,
|
|
"output_cost_per_token_above_128k_tokens": 1e-06,
|
|
"source": "https://docs.x.ai/docs/models",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_web_search": true
|
|
},
|
|
"xai/grok-4-0709": {
|
|
"input_cost_per_token": 3e-06,
|
|
"input_cost_per_token_above_128k_tokens": 6e-06,
|
|
"litellm_provider": "xai",
|
|
"max_input_tokens": 256000,
|
|
"max_output_tokens": 256000,
|
|
"max_tokens": 256000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"output_cost_per_token_above_128k_tokens": 30e-06,
|
|
"source": "https://docs.x.ai/docs/models",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_web_search": true
|
|
},
|
|
"xai/grok-4-latest": {
|
|
"input_cost_per_token": 3e-06,
|
|
"input_cost_per_token_above_128k_tokens": 6e-06,
|
|
"litellm_provider": "xai",
|
|
"max_input_tokens": 256000,
|
|
"max_output_tokens": 256000,
|
|
"max_tokens": 256000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"output_cost_per_token_above_128k_tokens": 30e-06,
|
|
"source": "https://docs.x.ai/docs/models",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_web_search": true
|
|
},
|
|
"xai/grok-4-1-fast": {
|
|
"cache_read_input_token_cost": 0.05e-06,
|
|
"input_cost_per_token": 0.2e-06,
|
|
"input_cost_per_token_above_128k_tokens": 0.4e-06,
|
|
"litellm_provider": "xai",
|
|
"max_input_tokens": 2e6,
|
|
"max_output_tokens": 2e6,
|
|
"max_tokens": 2e6,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.5e-06,
|
|
"output_cost_per_token_above_128k_tokens": 1e-06,
|
|
"source": "https://docs.x.ai/docs/models/grok-4-1-fast-reasoning",
|
|
"supports_audio_input": true,
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true
|
|
},
|
|
"xai/grok-4-1-fast-reasoning": {
|
|
"cache_read_input_token_cost": 0.05e-06,
|
|
"input_cost_per_token": 0.2e-06,
|
|
"input_cost_per_token_above_128k_tokens": 0.4e-06,
|
|
"litellm_provider": "xai",
|
|
"max_input_tokens": 2e6,
|
|
"max_output_tokens": 2e6,
|
|
"max_tokens": 2e6,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.5e-06,
|
|
"output_cost_per_token_above_128k_tokens": 1e-06,
|
|
"source": "https://docs.x.ai/docs/models/grok-4-1-fast-reasoning",
|
|
"supports_audio_input": true,
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true
|
|
},
|
|
"xai/grok-4-1-fast-reasoning-latest": {
|
|
"cache_read_input_token_cost": 0.05e-06,
|
|
"input_cost_per_token": 0.2e-06,
|
|
"input_cost_per_token_above_128k_tokens": 0.4e-06,
|
|
"litellm_provider": "xai",
|
|
"max_input_tokens": 2e6,
|
|
"max_output_tokens": 2e6,
|
|
"max_tokens": 2e6,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.5e-06,
|
|
"output_cost_per_token_above_128k_tokens": 1e-06,
|
|
"source": "https://docs.x.ai/docs/models/grok-4-1-fast-reasoning",
|
|
"supports_audio_input": true,
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true
|
|
},
|
|
"xai/grok-4-1-fast-non-reasoning": {
|
|
"cache_read_input_token_cost": 0.05e-06,
|
|
"input_cost_per_token": 0.2e-06,
|
|
"input_cost_per_token_above_128k_tokens": 0.4e-06,
|
|
"litellm_provider": "xai",
|
|
"max_input_tokens": 2e6,
|
|
"max_output_tokens": 2e6,
|
|
"max_tokens": 2e6,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.5e-06,
|
|
"output_cost_per_token_above_128k_tokens": 1e-06,
|
|
"source": "https://docs.x.ai/docs/models/grok-4-1-fast-non-reasoning",
|
|
"supports_audio_input": true,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true
|
|
},
|
|
"xai/grok-4-1-fast-non-reasoning-latest": {
|
|
"cache_read_input_token_cost": 0.05e-06,
|
|
"input_cost_per_token": 0.2e-06,
|
|
"input_cost_per_token_above_128k_tokens": 0.4e-06,
|
|
"litellm_provider": "xai",
|
|
"max_input_tokens": 2e6,
|
|
"max_output_tokens": 2e6,
|
|
"max_tokens": 2e6,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 0.5e-06,
|
|
"output_cost_per_token_above_128k_tokens": 1e-06,
|
|
"source": "https://docs.x.ai/docs/models/grok-4-1-fast-non-reasoning",
|
|
"supports_audio_input": true,
|
|
"supports_function_calling": true,
|
|
"supports_response_schema": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true
|
|
},
|
|
"xai/grok-beta": {
|
|
"input_cost_per_token": 5e-06,
|
|
"litellm_provider": "xai",
|
|
"max_input_tokens": 131072,
|
|
"max_output_tokens": 131072,
|
|
"max_tokens": 131072,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true
|
|
},
|
|
"xai/grok-code-fast": {
|
|
"cache_read_input_token_cost": 2e-08,
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "xai",
|
|
"max_input_tokens": 256000,
|
|
"max_output_tokens": 256000,
|
|
"max_tokens": 256000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-06,
|
|
"source": "https://docs.x.ai/docs/models",
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"xai/grok-code-fast-1": {
|
|
"cache_read_input_token_cost": 2e-08,
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "xai",
|
|
"max_input_tokens": 256000,
|
|
"max_output_tokens": 256000,
|
|
"max_tokens": 256000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-06,
|
|
"source": "https://docs.x.ai/docs/models",
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"xai/grok-code-fast-1-0825": {
|
|
"cache_read_input_token_cost": 2e-08,
|
|
"input_cost_per_token": 2e-07,
|
|
"litellm_provider": "xai",
|
|
"max_input_tokens": 256000,
|
|
"max_output_tokens": 256000,
|
|
"max_tokens": 256000,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-06,
|
|
"source": "https://docs.x.ai/docs/models",
|
|
"supports_function_calling": true,
|
|
"supports_reasoning": true,
|
|
"supports_tool_choice": true
|
|
},
|
|
"xai/grok-vision-beta": {
|
|
"input_cost_per_image": 5e-06,
|
|
"input_cost_per_token": 5e-06,
|
|
"litellm_provider": "xai",
|
|
"max_input_tokens": 8192,
|
|
"max_output_tokens": 8192,
|
|
"max_tokens": 8192,
|
|
"mode": "chat",
|
|
"output_cost_per_token": 1.5e-05,
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"supports_web_search": true
|
|
},
|
|
"zai/glm-4.6": {
|
|
"input_cost_per_token": 6e-07,
|
|
"output_cost_per_token": 2.2e-06,
|
|
"litellm_provider": "zai",
|
|
"max_input_tokens": 200000,
|
|
"max_output_tokens": 128000,
|
|
"mode": "chat",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"source": "https://docs.z.ai/guides/overview/pricing"
|
|
},
|
|
"zai/glm-4.5": {
|
|
"input_cost_per_token": 6e-07,
|
|
"output_cost_per_token": 2.2e-06,
|
|
"litellm_provider": "zai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 32000,
|
|
"mode": "chat",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"source": "https://docs.z.ai/guides/overview/pricing"
|
|
},
|
|
"zai/glm-4.5v": {
|
|
"input_cost_per_token": 6e-07,
|
|
"output_cost_per_token": 1.8e-06,
|
|
"litellm_provider": "zai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 32000,
|
|
"mode": "chat",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"supports_vision": true,
|
|
"source": "https://docs.z.ai/guides/overview/pricing"
|
|
},
|
|
"zai/glm-4.5-x": {
|
|
"input_cost_per_token": 2.2e-06,
|
|
"output_cost_per_token": 8.9e-06,
|
|
"litellm_provider": "zai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 32000,
|
|
"mode": "chat",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"source": "https://docs.z.ai/guides/overview/pricing"
|
|
},
|
|
"zai/glm-4.5-air": {
|
|
"input_cost_per_token": 2e-07,
|
|
"output_cost_per_token": 1.1e-06,
|
|
"litellm_provider": "zai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 32000,
|
|
"mode": "chat",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"source": "https://docs.z.ai/guides/overview/pricing"
|
|
},
|
|
"zai/glm-4.5-airx": {
|
|
"input_cost_per_token": 1.1e-06,
|
|
"output_cost_per_token": 4.5e-06,
|
|
"litellm_provider": "zai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 32000,
|
|
"mode": "chat",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"source": "https://docs.z.ai/guides/overview/pricing"
|
|
},
|
|
"zai/glm-4-32b-0414-128k": {
|
|
"input_cost_per_token": 1e-07,
|
|
"output_cost_per_token": 1e-07,
|
|
"litellm_provider": "zai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 32000,
|
|
"mode": "chat",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"source": "https://docs.z.ai/guides/overview/pricing"
|
|
},
|
|
"zai/glm-4.5-flash": {
|
|
"input_cost_per_token": 0,
|
|
"output_cost_per_token": 0,
|
|
"litellm_provider": "zai",
|
|
"max_input_tokens": 128000,
|
|
"max_output_tokens": 32000,
|
|
"mode": "chat",
|
|
"supports_function_calling": true,
|
|
"supports_tool_choice": true,
|
|
"source": "https://docs.z.ai/guides/overview/pricing"
|
|
},
|
|
"vertex_ai/search_api": {
|
|
"input_cost_per_query": 1.5e-03,
|
|
"litellm_provider": "vertex_ai",
|
|
"mode": "vector_store"
|
|
},
|
|
"openai/container": {
|
|
"code_interpreter_cost_per_session": 0.03,
|
|
"litellm_provider": "openai",
|
|
"mode": "chat"
|
|
},
|
|
"openai/sora-2": {
|
|
"litellm_provider": "openai",
|
|
"mode": "video_generation",
|
|
"output_cost_per_video_per_second": 0.10,
|
|
"source": "https://platform.openai.com/docs/api-reference/videos",
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"video"
|
|
],
|
|
"supported_resolutions": [
|
|
"720x1280",
|
|
"1280x720"
|
|
]
|
|
},
|
|
"openai/sora-2-pro": {
|
|
"litellm_provider": "openai",
|
|
"mode": "video_generation",
|
|
"output_cost_per_video_per_second": 0.30,
|
|
"source": "https://platform.openai.com/docs/api-reference/videos",
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"video"
|
|
],
|
|
"supported_resolutions": [
|
|
"720x1280",
|
|
"1280x720"
|
|
]
|
|
},
|
|
"azure/sora-2": {
|
|
"litellm_provider": "azure",
|
|
"mode": "video_generation",
|
|
"output_cost_per_video_per_second": 0.10,
|
|
"source": "https://azure.microsoft.com/en-us/products/ai-services/video-generation",
|
|
"supported_modalities": [
|
|
"text"
|
|
],
|
|
"supported_output_modalities": [
|
|
"video"
|
|
],
|
|
"supported_resolutions": [
|
|
"720x1280",
|
|
"1280x720"
|
|
]
|
|
},
|
|
"azure/sora-2-pro": {
|
|
"litellm_provider": "azure",
|
|
"mode": "video_generation",
|
|
"output_cost_per_video_per_second": 0.30,
|
|
"source": "https://azure.microsoft.com/en-us/products/ai-services/video-generation",
|
|
"supported_modalities": [
|
|
"text"
|
|
],
|
|
"supported_output_modalities": [
|
|
"video"
|
|
],
|
|
"supported_resolutions": [
|
|
"720x1280",
|
|
"1280x720"
|
|
]
|
|
},
|
|
"azure/sora-2-pro-high-res": {
|
|
"litellm_provider": "azure",
|
|
"mode": "video_generation",
|
|
"output_cost_per_video_per_second": 0.50,
|
|
"source": "https://azure.microsoft.com/en-us/products/ai-services/video-generation",
|
|
"supported_modalities": [
|
|
"text"
|
|
],
|
|
"supported_output_modalities": [
|
|
"video"
|
|
],
|
|
"supported_resolutions": [
|
|
"1024x1792",
|
|
"1792x1024"
|
|
]
|
|
},
|
|
"runwayml/gen4_turbo": {
|
|
"litellm_provider": "runwayml",
|
|
"mode": "video_generation",
|
|
"output_cost_per_video_per_second": 0.05,
|
|
"source": "https://docs.dev.runwayml.com/guides/pricing/",
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"video"
|
|
],
|
|
"supported_resolutions": [
|
|
"1280x720",
|
|
"720x1280"
|
|
],
|
|
"metadata": {
|
|
"comment": "5 credits per second @ $0.01 per credit = $0.05 per second"
|
|
}
|
|
},
|
|
"runwayml/gen4_aleph": {
|
|
"litellm_provider": "runwayml",
|
|
"mode": "video_generation",
|
|
"output_cost_per_video_per_second": 0.15,
|
|
"source": "https://docs.dev.runwayml.com/guides/pricing/",
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"video"
|
|
],
|
|
"supported_resolutions": [
|
|
"1280x720",
|
|
"720x1280"
|
|
],
|
|
"metadata": {
|
|
"comment": "15 credits per second @ $0.01 per credit = $0.15 per second"
|
|
}
|
|
},
|
|
"runwayml/gen3a_turbo": {
|
|
"litellm_provider": "runwayml",
|
|
"mode": "video_generation",
|
|
"output_cost_per_video_per_second": 0.05,
|
|
"source": "https://docs.dev.runwayml.com/guides/pricing/",
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"video"
|
|
],
|
|
"supported_resolutions": [
|
|
"1280x720",
|
|
"720x1280"
|
|
],
|
|
"metadata": {
|
|
"comment": "5 credits per second @ $0.01 per credit = $0.05 per second"
|
|
}
|
|
},
|
|
"runwayml/gen4_image": {
|
|
"litellm_provider": "runwayml",
|
|
"mode": "image_generation",
|
|
"input_cost_per_image": 0.05,
|
|
"output_cost_per_image": 0.05,
|
|
"source": "https://docs.dev.runwayml.com/guides/pricing/",
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"image"
|
|
],
|
|
"supported_resolutions": [
|
|
"1280x720",
|
|
"1920x1080"
|
|
],
|
|
"metadata": {
|
|
"comment": "5 credits per 720p image or 8 credits per 1080p image @ $0.01 per credit. Using 5 credits ($0.05) as base cost"
|
|
}
|
|
},
|
|
"runwayml/gen4_image_turbo": {
|
|
"litellm_provider": "runwayml",
|
|
"mode": "image_generation",
|
|
"input_cost_per_image": 0.02,
|
|
"output_cost_per_image": 0.02,
|
|
"source": "https://docs.dev.runwayml.com/guides/pricing/",
|
|
"supported_modalities": [
|
|
"text",
|
|
"image"
|
|
],
|
|
"supported_output_modalities": [
|
|
"image"
|
|
],
|
|
"supported_resolutions": [
|
|
"1280x720",
|
|
"1920x1080"
|
|
],
|
|
"metadata": {
|
|
"comment": "2 credits per image (any resolution) @ $0.01 per credit = $0.02 per image"
|
|
}
|
|
},
|
|
"runwayml/eleven_multilingual_v2": {
|
|
"litellm_provider": "runwayml",
|
|
"mode": "audio_speech",
|
|
"input_cost_per_character": 3e-07,
|
|
"source": "https://docs.dev.runwayml.com/guides/pricing/",
|
|
"metadata": {
|
|
"comment": "Estimated cost based on standard TTS pricing. RunwayML uses ElevenLabs models."
|
|
}
|
|
}
|
|
}
|