diff --git a/model_prices_and_context_window.json b/model_prices_and_context_window.json
index 4278b537f3e6..60f812b2bb6d 100644
--- a/model_prices_and_context_window.json
+++ b/model_prices_and_context_window.json
@@ -4085,6 +4085,61 @@
         "input_cost_per_request": 0.005,
         "litellm_provider": "perplexity",
         "mode": "chat"
+    },
+    "fireworks_ai/firefunction-v2": {
+        "max_tokens": 8192,
+        "max_input_tokens": 8192,
+        "max_output_tokens": 8192,
+        "input_cost_per_token": 0.0000009,
+        "output_cost_per_token": 0.0000009,
+        "litellm_provider": "fireworks_ai",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "source": "https://fireworks.ai/pricing"
+    },
+    "fireworks_ai/mixtral-8x22b-instruct-hf": {
+        "max_tokens": 65536,
+        "max_input_tokens": 65536,
+        "max_output_tokens": 65536,
+        "input_cost_per_token": 0.0000012,
+        "output_cost_per_token": 0.0000012,
+        "litellm_provider": "fireworks_ai",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "source": "https://fireworks.ai/pricing"
+    },
+    "fireworks_ai/qwen2-72b-instruct": {
+        "max_tokens": 32768,
+        "max_input_tokens": 32768,
+        "max_output_tokens": 32768,
+        "input_cost_per_token": 0.0000009,
+        "output_cost_per_token": 0.0000009,
+        "litellm_provider": "fireworks_ai",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "source": "https://fireworks.ai/pricing"
+    },
+    "fireworks_ai/yi-large": {
+        "max_tokens": 32768,
+        "max_input_tokens": 32768,
+        "max_output_tokens": 32768,
+        "input_cost_per_token": 0.000003,
+        "output_cost_per_token": 0.000003,
+        "litellm_provider": "fireworks_ai",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "source": "https://fireworks.ai/pricing"
+    },
+    "fireworks_ai/deepseek-coder-v2-instruct": {
+        "max_tokens": 65536,
+        "max_input_tokens": 65536,
+        "max_output_tokens": 8192,
+        "input_cost_per_token": 0.0000012,
+        "output_cost_per_token": 0.0000012,
+        "litellm_provider": "fireworks_ai",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "source": "https://fireworks.ai/pricing"
     },
     "anyscale/mistralai/Mistral-7B-Instruct-v0.1": {
         "max_tokens": 16384,
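For context, a minimal sketch of how the added pricing fields translate into a per-request cost: total cost is prompt tokens times `input_cost_per_token` plus completion tokens times `output_cost_per_token`. The token counts below are hypothetical, and the script assumes it runs from the repository root where `model_prices_and_context_window.json` lives.

```python
import json

# Load the pricing map that this diff extends.
with open("model_prices_and_context_window.json") as f:
    prices = json.load(f)

entry = prices["fireworks_ai/firefunction-v2"]

prompt_tokens = 1_200      # hypothetical request size
completion_tokens = 300    # hypothetical response size

# Per-token pricing: tokens * cost_per_token for each direction.
cost = (
    prompt_tokens * entry["input_cost_per_token"]
    + completion_tokens * entry["output_cost_per_token"]
)
print(f"estimated cost: ${cost:.6f}")  # 1200*0.0000009 + 300*0.0000009 = $0.001350
```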