167 changes: 0 additions & 167 deletions model_prices_and_context_window.json
@@ -378,47 +378,6 @@
"search_context_size_high": 0.05
}
},
"gpt-4.5-preview": {
"max_tokens": 16384,
"max_input_tokens": 128000,
"max_output_tokens": 16384,
"input_cost_per_token": 7.5e-05,
"output_cost_per_token": 0.00015,
"input_cost_per_token_batches": 3.75e-05,
"output_cost_per_token_batches": 7.5e-05,
"cache_read_input_token_cost": 3.75e-05,
"litellm_provider": "openai",
"mode": "chat",
"supports_pdf_input": true,
"supports_function_calling": true,
"supports_parallel_function_calling": true,
"supports_response_schema": true,
"supports_vision": true,
"supports_prompt_caching": true,
"supports_system_messages": true,
"supports_tool_choice": true
},
"gpt-4.5-preview-2025-02-27": {
"max_tokens": 16384,
"max_input_tokens": 128000,
"max_output_tokens": 16384,
"input_cost_per_token": 7.5e-05,
"output_cost_per_token": 0.00015,
"input_cost_per_token_batches": 3.75e-05,
"output_cost_per_token_batches": 7.5e-05,
"cache_read_input_token_cost": 3.75e-05,
"litellm_provider": "openai",
"mode": "chat",
"supports_pdf_input": true,
"supports_function_calling": true,
"supports_parallel_function_calling": true,
"supports_response_schema": true,
"supports_vision": true,
"supports_prompt_caching": true,
"supports_system_messages": true,
"supports_tool_choice": true,
"deprecation_date": "2025-07-14"
},
"gpt-4o-audio-preview": {
"max_tokens": 16384,
"max_input_tokens": 128000,
@@ -1605,18 +1564,6 @@
"supports_system_messages": true,
"supports_tool_choice": true
},
"gpt-4-0314": {
"max_tokens": 4096,
"max_input_tokens": 8192,
"max_output_tokens": 4096,
"input_cost_per_token": 3e-05,
"output_cost_per_token": 6e-05,
"litellm_provider": "openai",
"mode": "chat",
"supports_prompt_caching": true,
"supports_system_messages": true,
"supports_tool_choice": true
},
"gpt-4-0613": {
"max_tokens": 4096,
"max_input_tokens": 8192,
@@ -1631,42 +1578,6 @@
"deprecation_date": "2025-06-06",
"supports_tool_choice": true
},
"gpt-4-32k": {
"max_tokens": 4096,
"max_input_tokens": 32768,
"max_output_tokens": 4096,
"input_cost_per_token": 6e-05,
"output_cost_per_token": 0.00012,
"litellm_provider": "openai",
"mode": "chat",
"supports_prompt_caching": true,
"supports_system_messages": true,
"supports_tool_choice": true
},
"gpt-4-32k-0314": {
"max_tokens": 4096,
"max_input_tokens": 32768,
"max_output_tokens": 4096,
"input_cost_per_token": 6e-05,
"output_cost_per_token": 0.00012,
"litellm_provider": "openai",
"mode": "chat",
"supports_prompt_caching": true,
"supports_system_messages": true,
"supports_tool_choice": true
},
"gpt-4-32k-0613": {
"max_tokens": 4096,
"max_input_tokens": 32768,
"max_output_tokens": 4096,
"input_cost_per_token": 6e-05,
"output_cost_per_token": 0.00012,
"litellm_provider": "openai",
"mode": "chat",
"supports_prompt_caching": true,
"supports_system_messages": true,
"supports_tool_choice": true
},
"gpt-4-turbo": {
"max_tokens": 4096,
"max_input_tokens": 128000,
@@ -1727,36 +1638,6 @@
"supports_system_messages": true,
"supports_tool_choice": true
},
"gpt-4-vision-preview": {
"max_tokens": 4096,
"max_input_tokens": 128000,
"max_output_tokens": 4096,
"input_cost_per_token": 1e-05,
"output_cost_per_token": 3e-05,
"litellm_provider": "openai",
"mode": "chat",
"supports_vision": true,
"supports_pdf_input": true,
"supports_prompt_caching": true,
"supports_system_messages": true,
"deprecation_date": "2024-12-06",
"supports_tool_choice": true
},
"gpt-4-1106-vision-preview": {
"max_tokens": 4096,
"max_input_tokens": 128000,
"max_output_tokens": 4096,
"input_cost_per_token": 1e-05,
"output_cost_per_token": 3e-05,
"litellm_provider": "openai",
"mode": "chat",
"supports_vision": true,
"supports_pdf_input": true,
"supports_prompt_caching": true,
"supports_system_messages": true,
"deprecation_date": "2024-12-06",
"supports_tool_choice": true
},
"gpt-3.5-turbo": {
"max_tokens": 4097,
"max_input_tokens": 16385,
@@ -1770,31 +1651,6 @@
"supports_system_messages": true,
"supports_tool_choice": true
},
"gpt-3.5-turbo-0301": {
"max_tokens": 4097,
"max_input_tokens": 4097,
"max_output_tokens": 4096,
"input_cost_per_token": 1.5e-06,
"output_cost_per_token": 2e-06,
"litellm_provider": "openai",
"mode": "chat",
"supports_prompt_caching": true,
"supports_system_messages": true,
"supports_tool_choice": true
},
"gpt-3.5-turbo-0613": {
"max_tokens": 4097,
"max_input_tokens": 4097,
"max_output_tokens": 4096,
"input_cost_per_token": 1.5e-06,
"output_cost_per_token": 2e-06,
"litellm_provider": "openai",
"mode": "chat",
"supports_function_calling": true,
"supports_prompt_caching": true,
"supports_system_messages": true,
"supports_tool_choice": true
},
"gpt-3.5-turbo-1106": {
"max_tokens": 16385,
"max_input_tokens": 16385,
@@ -1835,18 +1691,6 @@
"supports_system_messages": true,
"supports_tool_choice": true
},
"gpt-3.5-turbo-16k-0613": {
"max_tokens": 16385,
"max_input_tokens": 16385,
"max_output_tokens": 4096,
"input_cost_per_token": 3e-06,
"output_cost_per_token": 4e-06,
"litellm_provider": "openai",
"mode": "chat",
"supports_prompt_caching": true,
"supports_system_messages": true,
"supports_tool_choice": true
},
"ft:gpt-3.5-turbo": {
"max_tokens": 4096,
"max_input_tokens": 16385,
@@ -1882,17 +1726,6 @@
"supports_system_messages": true,
"supports_tool_choice": true
},
"ft:gpt-3.5-turbo-0613": {
"max_tokens": 4096,
"max_input_tokens": 4096,
"max_output_tokens": 4096,
"input_cost_per_token": 3e-06,
"output_cost_per_token": 6e-06,
"litellm_provider": "openai",
"mode": "chat",
"supports_system_messages": true,
"supports_tool_choice": true
},
"ft:gpt-4-0613": {
"max_tokens": 4096,
"max_input_tokens": 8192,
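For readers auditing this removal: below is a minimal sketch, assuming a plain JSON lookup rather than LiteLLM's real cost APIs, of how the per-token fields in `model_prices_and_context_window.json` are typically consumed and why callers still pinned to a deleted entry (e.g. `gpt-4-0314`) would now hit a lookup failure. The `estimate_cost` helper and the hard-coded file path are illustrative assumptions, not part of this PR.

```python
import json


# Illustrative helper (not LiteLLM's actual interface): estimate a request's
# cost from the per-token prices stored in model_prices_and_context_window.json.
def estimate_cost(prices: dict, model: str, prompt_tokens: int, completion_tokens: int) -> float:
    entry = prices[model]  # raises KeyError for entries removed in this PR, e.g. "gpt-4-0314"
    return (
        prompt_tokens * entry["input_cost_per_token"]
        + completion_tokens * entry["output_cost_per_token"]
    )


with open("model_prices_and_context_window.json") as f:
    prices = json.load(f)

# Example with figures from this diff: the removed "gpt-4-0314" entry priced
# input at 3e-05 and output at 6e-05 dollars per token, so 1,000 prompt plus
# 200 completion tokens came to 1_000 * 3e-05 + 200 * 6e-05 = $0.042. After
# this change that key is gone and the lookup above fails, so callers should
# move to a retained snapshot such as "gpt-4-0613".
print(estimate_cost(prices, "gpt-4-0613", prompt_tokens=1_000, completion_tokens=200))
```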