diff --git a/.circleci/config.yml b/.circleci/config.yml
index 3294845bd92b..52c2115bf5fd 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -1358,6 +1358,7 @@ jobs:
# - run: python ./tests/documentation_tests/test_general_setting_keys.py
- run: python ./tests/code_coverage_tests/check_licenses.py
- run: python ./tests/code_coverage_tests/router_code_coverage.py
+ - run: python ./tests/code_coverage_tests/test_proxy_types_import.py
- run: python ./tests/code_coverage_tests/callback_manager_test.py
- run: python ./tests/code_coverage_tests/recursive_detector.py
- run: python ./tests/code_coverage_tests/test_router_strategy_async.py
diff --git a/docs/my-website/docs/proxy/cost_tracking.md b/docs/my-website/docs/proxy/cost_tracking.md
index 58a6b1f27e62..019ca3da125b 100644
--- a/docs/my-website/docs/proxy/cost_tracking.md
+++ b/docs/my-website/docs/proxy/cost_tracking.md
@@ -255,6 +255,198 @@ curl -L -X GET 'http://localhost:4000/user/daily/activity?start_date=2025-03-20&
See our [Swagger API](https://litellm-api.up.railway.app/#/Budget%20%26%20Spend%20Tracking/get_user_daily_activity_user_daily_activity_get) for more details on the `/user/daily/activity` endpoint
+## Custom Tags
+
+Requirements:
+
+- Virtual Keys & a database should be set up; see [virtual keys](https://docs.litellm.ai/docs/proxy/virtual_keys)
+
+**Note:** By default, LiteLLM will track `User-Agent` as a custom tag for cost tracking. This enables viewing usage for tools like Claude Code, Gemini CLI, etc.
+
+
+
+
+
+### Client-side spend tag
+
+
+
+
+```bash
+curl -L -X POST 'http://0.0.0.0:4000/key/generate' \
+-H 'Authorization: Bearer sk-1234' \
+-H 'Content-Type: application/json' \
+-d '{
+ "metadata": {
+ "tags": ["tag1", "tag2", "tag3"]
+ }
+}
+
+'
+```
+
+
+
+
+```bash
+curl -L -X POST 'http://0.0.0.0:4000/team/new' \
+-H 'Authorization: Bearer sk-1234' \
+-H 'Content-Type: application/json' \
+-d '{
+ "metadata": {
+ "tags": ["tag1", "tag2", "tag3"]
+ }
+}
+
+'
+```
+
+
+
+
+Set `extra_body={"metadata": { }}` to the metadata you want to pass:
+
+```python
+import openai
+client = openai.OpenAI(
+ api_key="anything",
+ base_url="http://0.0.0.0:4000"
+)
+
+
+response = client.chat.completions.create(
+ model="gpt-3.5-turbo",
+ messages = [
+ {
+ "role": "user",
+ "content": "this is a test request, write a short poem"
+ }
+ ],
+ extra_body={
+ "metadata": {
+ "tags": ["model-anthropic-claude-v2.1", "app-ishaan-prod"] # 👈 Key Change
+ }
+ }
+)
+
+print(response)
+```
+
+
+
+
+
+```js
+const openai = require('openai');
+
+async function runOpenAI() {
+ const client = new openai.OpenAI({
+ apiKey: 'sk-1234',
+ baseURL: 'http://0.0.0.0:4000'
+ });
+
+ try {
+ const response = await client.chat.completions.create({
+ model: 'gpt-3.5-turbo',
+ messages: [
+ {
+ role: 'user',
+ content: "this is a test request, write a short poem"
+ },
+ ],
+ metadata: {
+ tags: ["model-anthropic-claude-v2.1", "app-ishaan-prod"] // 👈 Key Change
+ }
+ });
+ console.log(response);
+ } catch (error) {
+ console.log("got this exception from server");
+ console.error(error);
+ }
+}
+
+// Call the asynchronous function
+runOpenAI();
+```
+
+
+
+
+Pass `metadata` as part of the request body
+
+```shell
+curl --location 'http://0.0.0.0:4000/chat/completions' \
+ --header 'Content-Type: application/json' \
+ --data '{
+ "model": "gpt-3.5-turbo",
+ "messages": [
+ {
+ "role": "user",
+ "content": "what llm are you"
+ }
+ ],
+ "metadata": {"tags": ["model-anthropic-claude-v2.1", "app-ishaan-prod"]}
+}'
+```
+
+
+
+```python
+from langchain.chat_models import ChatOpenAI
+from langchain.prompts.chat import (
+ ChatPromptTemplate,
+ HumanMessagePromptTemplate,
+ SystemMessagePromptTemplate,
+)
+from langchain.schema import HumanMessage, SystemMessage
+
+chat = ChatOpenAI(
+ openai_api_base="http://0.0.0.0:4000",
+ model = "gpt-3.5-turbo",
+ temperature=0.1,
+ extra_body={
+ "metadata": {
+ "tags": ["model-anthropic-claude-v2.1", "app-ishaan-prod"]
+ }
+ }
+)
+
+messages = [
+ SystemMessage(
+ content="You are a helpful assistant that im using to make a test request to."
+ ),
+ HumanMessage(
+ content="test from litellm. tell me why it's amazing in 1 sentence"
+ ),
+]
+response = chat(messages)
+
+print(response)
+```
+
+
+
+
+
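+Once tagged requests are flowing, you can view spend grouped by tag. A minimal sketch using the `/spend/tags` endpoint (see the Enterprise spend tracking docs for the full request format; the admin key is illustrative):
+
+```bash
+# List spend aggregated per tag
+curl -L -X GET 'http://0.0.0.0:4000/spend/tags' \
+-H 'Authorization: Bearer sk-1234'
+```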
+
+### Add custom headers to spend tracking
+
+You can track spend and usage against custom request headers by listing them under `extra_spend_tag_headers`:
+
+```yaml
+litellm_settings:
+ extra_spend_tag_headers:
+ - "x-custom-header"
+```
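+
+With that setting, the value of `x-custom-header` on an incoming request is recorded as a spend tag. A minimal sketch (the header value `my-batch-job` is illustrative):
+
+```bash
+curl -L -X POST 'http://0.0.0.0:4000/chat/completions' \
+-H 'Authorization: Bearer sk-1234' \
+-H 'Content-Type: application/json' \
+-H 'x-custom-header: my-batch-job' \
+-d '{
+  "model": "gpt-3.5-turbo",
+  "messages": [{"role": "user", "content": "hello"}]
+}'
+```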
+
+### Disable user-agent tracking
+
+You can disable user-agent tracking by setting `litellm_settings.disable_user_agent_tracking` to `true`.
+
+```yaml
+litellm_settings:
+ disable_user_agent_tracking: true
+```
## ✨ (Enterprise) Generate Spend Reports
Use this to charge other teams, customers, users
@@ -617,11 +809,5 @@ Logging specific key,value pairs in spend logs metadata is an enterprise feature
:::
-## ✨ Custom Tags
-:::info
-
-Tracking spend with Custom tags is an enterprise feature. [See here](./enterprise.md#tracking-spend-for-custom-tags)
-
-:::
diff --git a/docs/my-website/docs/proxy/custom_root_ui.md b/docs/my-website/docs/proxy/custom_root_ui.md
index 1bab94314749..28ef57d81a46 100644
--- a/docs/my-website/docs/proxy/custom_root_ui.md
+++ b/docs/my-website/docs/proxy/custom_root_ui.md
@@ -12,6 +12,9 @@ Requires v1.72.3 or higher.
:::
+Limitations:
+- This does not work in [litellm non-root](./deploy#non-root---without-internet-connection) images, as it requires write access to the UI files.
+
## Usage
### 1. Set `SERVER_ROOT_PATH` in your .env
diff --git a/docs/my-website/docs/proxy/enterprise.md b/docs/my-website/docs/proxy/enterprise.md
index 8ea8e748e94e..d5ba3fc7f53c 100644
--- a/docs/my-website/docs/proxy/enterprise.md
+++ b/docs/my-website/docs/proxy/enterprise.md
@@ -29,7 +29,6 @@ Features:
- ✅ [Team Based Logging](./team_logging.md) - Allow each team to use their own Langfuse Project / custom callbacks
- ✅ [Disable Logging for a Team](./team_logging.md#disable-logging-for-a-team) - Switch off all logging for a team/project (GDPR Compliance)
- **Spend Tracking & Data Exports**
- - ✅ [Tracking Spend for Custom Tags](#tracking-spend-for-custom-tags)
- ✅ [Set USD Budgets Spend for Custom Tags](./provider_budget_routing#-tag-budgets)
- ✅ [Set Model budgets for Virtual Keys](./users#-virtual-key-model-specific)
- ✅ [Exporting LLM Logs to GCS Bucket, Azure Blob Storage](./proxy/bucket#🪣-logging-gcs-s3-buckets)
@@ -332,174 +331,6 @@ curl --location 'http://0.0.0.0:4000/embeddings' \
## Spend Tracking
-### Custom Tags
-
-Requirements:
-
-- Virtual Keys & a database should be set up, see [virtual keys](https://docs.litellm.ai/docs/proxy/virtual_keys)
-
-#### Usage - /chat/completions requests with request tags
-
-
-
-
-
-```bash
-curl -L -X POST 'http://0.0.0.0:4000/key/generate' \
--H 'Authorization: Bearer sk-1234' \
--H 'Content-Type: application/json' \
--d '{
- "metadata": {
- "tags": ["tag1", "tag2", "tag3"]
- }
-}
-
-'
-```
-
-
-
-
-```bash
-curl -L -X POST 'http://0.0.0.0:4000/team/new' \
--H 'Authorization: Bearer sk-1234' \
--H 'Content-Type: application/json' \
--d '{
- "metadata": {
- "tags": ["tag1", "tag2", "tag3"]
- }
-}
-
-'
-```
-
-
-
-
-Set `extra_body={"metadata": { }}` to `metadata` you want to pass
-
-```python
-import openai
-client = openai.OpenAI(
- api_key="anything",
- base_url="http://0.0.0.0:4000"
-)
-
-
-response = client.chat.completions.create(
- model="gpt-3.5-turbo",
- messages = [
- {
- "role": "user",
- "content": "this is a test request, write a short poem"
- }
- ],
- extra_body={
- "metadata": {
- "tags": ["model-anthropic-claude-v2.1", "app-ishaan-prod"] # 👈 Key Change
- }
- }
-)
-
-print(response)
-```
-
-
-
-
-
-```js
-const openai = require('openai');
-
-async function runOpenAI() {
- const client = new openai.OpenAI({
- apiKey: 'sk-1234',
- baseURL: 'http://0.0.0.0:4000'
- });
-
- try {
- const response = await client.chat.completions.create({
- model: 'gpt-3.5-turbo',
- messages: [
- {
- role: 'user',
- content: "this is a test request, write a short poem"
- },
- ],
- metadata: {
- tags: ["model-anthropic-claude-v2.1", "app-ishaan-prod"] // 👈 Key Change
- }
- });
- console.log(response);
- } catch (error) {
- console.log("got this exception from server");
- console.error(error);
- }
-}
-
-// Call the asynchronous function
-runOpenAI();
-```
-
-
-
-
-Pass `metadata` as part of the request body
-
-```shell
-curl --location 'http://0.0.0.0:4000/chat/completions' \
- --header 'Content-Type: application/json' \
- --data '{
- "model": "gpt-3.5-turbo",
- "messages": [
- {
- "role": "user",
- "content": "what llm are you"
- }
- ],
- "metadata": {"tags": ["model-anthropic-claude-v2.1", "app-ishaan-prod"]}
-}'
-```
-
-
-
-```python
-from langchain.chat_models import ChatOpenAI
-from langchain.prompts.chat import (
- ChatPromptTemplate,
- HumanMessagePromptTemplate,
- SystemMessagePromptTemplate,
-)
-from langchain.schema import HumanMessage, SystemMessage
-
-chat = ChatOpenAI(
- openai_api_base="http://0.0.0.0:4000",
- model = "gpt-3.5-turbo",
- temperature=0.1,
- extra_body={
- "metadata": {
- "tags": ["model-anthropic-claude-v2.1", "app-ishaan-prod"]
- }
- }
-)
-
-messages = [
- SystemMessage(
- content="You are a helpful assistant that im using to make a test request to."
- ),
- HumanMessage(
- content="test from litellm. tell me why it's amazing in 1 sentence"
- ),
-]
-response = chat(messages)
-
-print(response)
-```
-
-
-
-
-
#### Viewing Spend per tag
#### `/spend/tags` Request Format
diff --git a/docs/my-website/docs/proxy/guardrails/panw_prisma_airs.md b/docs/my-website/docs/proxy/guardrails/panw_prisma_airs.md
index 6e38214e5652..20cbc60a3e91 100644
--- a/docs/my-website/docs/proxy/guardrails/panw_prisma_airs.md
+++ b/docs/my-website/docs/proxy/guardrails/panw_prisma_airs.md
@@ -124,7 +124,6 @@ Expected response on failure:
```
-
```shell
diff --git a/docs/my-website/docs/tutorials/litellm_gemini_cli.md b/docs/my-website/docs/tutorials/litellm_gemini_cli.md
index 6c37c47c206a..bf8e2cb44cb1 100644
--- a/docs/my-website/docs/tutorials/litellm_gemini_cli.md
+++ b/docs/my-website/docs/tutorials/litellm_gemini_cli.md
@@ -5,7 +5,7 @@ This tutorial shows you how to integrate the Gemini CLI with LiteLLM Proxy, allo
:::info
-This integration is supported from LiteLLMv1.73.3-nightly and above.
+This integration is supported from LiteLLM v1.73.3-nightly and above.
:::
@@ -13,6 +13,19 @@ This integration is supported from LiteLLMv1.73.3-nightly and above.
+## Benefits of using gemini-cli with LiteLLM
+
+When you use gemini-cli with LiteLLM, you get the following benefits:
+
+**Developer Benefits:**
+- Universal Model Access: Use any LiteLLM supported model (Anthropic, OpenAI, Vertex AI, Bedrock, etc.) through the gemini-cli interface.
+- Higher Rate Limits & Reliability: Load balance across multiple models and providers to avoid hitting individual provider limits, with fallbacks to ensure you get responses even if one provider fails.
+
+**Proxy Admin Benefits:**
+- Centralized Management: Control access to all models through a single LiteLLM proxy instance without giving your developers API Keys to each provider.
+- Budget Controls: Set spending limits and track costs across all gemini-cli usage.
+
+
## Prerequisites
@@ -63,6 +76,99 @@ The CLI will now use LiteLLM Proxy as the backend, giving you access to LiteLLM'
- Cost tracking
- Model routing and fallbacks
+
+## Advanced
+
+### Use Anthropic, OpenAI, Bedrock, etc. models on gemini-cli
+
+To use non-Gemini models with gemini-cli, set a `model_group_alias` in the LiteLLM Proxy config. This tells LiteLLM to route requests with `model=gemini-2.5-pro` to your desired model from any provider.
+
+import Tabs from '@theme/Tabs';
+import TabItem from '@theme/TabItem';
+
+
+
+
+Route `gemini-2.5-pro` requests to Claude Sonnet:
+
+```yaml showLineNumbers title="proxy_config.yaml"
+model_list:
+ - model_name: claude-sonnet-4-20250514
+ litellm_params:
+ model: anthropic/claude-3-5-sonnet-20241022
+ api_key: os.environ/ANTHROPIC_API_KEY
+
+router_settings:
+ model_group_alias: {"gemini-2.5-pro": "claude-sonnet-4-20250514"}
+```
+
+
+
+
+Route `gemini-2.5-pro` requests to GPT-4o:
+
+```yaml showLineNumbers title="proxy_config.yaml"
+model_list:
+ - model_name: gpt-4o-model
+ litellm_params:
+ model: gpt-4o
+ api_key: os.environ/OPENAI_API_KEY
+
+router_settings:
+ model_group_alias: {"gemini-2.5-pro": "gpt-4o-model"}
+```
+
+
+
+
+Route `gemini-2.5-pro` requests to Claude on Bedrock:
+
+```yaml showLineNumbers title="proxy_config.yaml"
+model_list:
+ - model_name: bedrock-claude
+ litellm_params:
+ model: bedrock/anthropic.claude-3-5-sonnet-20241022-v2:0
+ aws_access_key_id: os.environ/AWS_ACCESS_KEY_ID
+ aws_secret_access_key: os.environ/AWS_SECRET_ACCESS_KEY
+ aws_region_name: us-east-1
+
+router_settings:
+ model_group_alias: {"gemini-2.5-pro": "bedrock-claude"}
+```
+
+
+
+
+All deployments with `model_name: anthropic-claude` will be load balanced. In this example, we load balance between Anthropic and Bedrock.
+
+```yaml showLineNumbers title="proxy_config.yaml"
+model_list:
+ - model_name: anthropic-claude
+ litellm_params:
+ model: anthropic/claude-3-5-sonnet-20241022
+ api_key: os.environ/ANTHROPIC_API_KEY
+ - model_name: anthropic-claude
+ litellm_params:
+ model: bedrock/anthropic.claude-3-5-sonnet-20241022-v2:0
+ aws_access_key_id: os.environ/AWS_ACCESS_KEY_ID
+ aws_secret_access_key: os.environ/AWS_SECRET_ACCESS_KEY
+ aws_region_name: us-east-1
+
+router_settings:
+ model_group_alias: {"gemini-2.5-pro": "anthropic-claude"}
+```
+
+
+
+
+With this configuration, when you use `gemini-2.5-pro` in the CLI, LiteLLM will automatically route your requests to the configured provider(s) with load balancing and fallbacks.
+
+
+
+
+
+
+
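+To verify the routing without the CLI, you can send a request for `gemini-2.5-pro` directly to the proxy and confirm it is served by your configured deployment. A minimal sketch (proxy URL and key are illustrative):
+
+```bash showLineNumbers title="test the alias"
+curl -L -X POST 'http://0.0.0.0:4000/chat/completions' \
+-H 'Authorization: Bearer sk-1234' \
+-H 'Content-Type: application/json' \
+-d '{
+  "model": "gemini-2.5-pro",
+  "messages": [{"role": "user", "content": "hello"}]
+}'
+```
+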
## Troubleshooting
If you encounter issues:
diff --git a/docs/my-website/docs/tutorials/openweb_ui.md b/docs/my-website/docs/tutorials/openweb_ui.md
index 82ff475add98..1744366b477d 100644
--- a/docs/my-website/docs/tutorials/openweb_ui.md
+++ b/docs/my-website/docs/tutorials/openweb_ui.md
@@ -135,3 +135,18 @@ On the models dropdown select `thinking-anthropic-claude-3-7-sonnet`
## Additional Resources
- Running LiteLLM and Open WebUI on Windows Localhost: A Comprehensive Guide [https://www.tanyongsheng.com/note/running-litellm-and-openwebui-on-windows-localhost-a-comprehensive-guide/](https://www.tanyongsheng.com/note/running-litellm-and-openwebui-on-windows-localhost-a-comprehensive-guide/)
+
+
+## Add Custom Headers to Spend Tracking
+
+You can track spend and usage against custom request headers by listing them under `extra_spend_tag_headers`:
+
+```yaml
+litellm_settings:
+ extra_spend_tag_headers:
+ - "x-custom-header"
+```
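+
+With that setting, the value of `x-custom-header` on requests sent through the proxy is recorded as a spend tag. A minimal sketch (the header value is illustrative):
+
+```bash
+curl -L -X POST 'http://0.0.0.0:4000/chat/completions' \
+-H 'Authorization: Bearer sk-1234' \
+-H 'Content-Type: application/json' \
+-H 'x-custom-header: openwebui-prod' \
+-d '{"model": "gpt-3.5-turbo", "messages": [{"role": "user", "content": "hello"}]}'
+```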
+
\ No newline at end of file
diff --git a/docs/my-website/img/claude_cli_tag_usage.png b/docs/my-website/img/claude_cli_tag_usage.png
new file mode 100644
index 000000000000..ec0d7fd93dc0
Binary files /dev/null and b/docs/my-website/img/claude_cli_tag_usage.png differ
diff --git a/docs/my-website/img/custom_tag_headers.png b/docs/my-website/img/custom_tag_headers.png
new file mode 100644
index 000000000000..a952a0840add
Binary files /dev/null and b/docs/my-website/img/custom_tag_headers.png differ
diff --git a/docs/my-website/img/release_notes/batch_api_cost_tracking.jpg b/docs/my-website/img/release_notes/batch_api_cost_tracking.jpg
new file mode 100644
index 000000000000..f6a9b8ccdafd
Binary files /dev/null and b/docs/my-website/img/release_notes/batch_api_cost_tracking.jpg differ
diff --git a/docs/my-website/img/release_notes/gemini_cli.png b/docs/my-website/img/release_notes/gemini_cli.png
new file mode 100644
index 000000000000..c0d5681bf46f
Binary files /dev/null and b/docs/my-website/img/release_notes/gemini_cli.png differ
diff --git a/docs/my-website/release_notes/v1.73.6-stable/index.md b/docs/my-website/release_notes/v1.73.6-stable/index.md
new file mode 100644
index 000000000000..76c51ddbf354
--- /dev/null
+++ b/docs/my-website/release_notes/v1.73.6-stable/index.md
@@ -0,0 +1,277 @@
+---
+title: "[PRE-RELEASE] v1.73.6-stable"
+slug: "v1-73-6-stable"
+date: 2025-06-28T10:00:00
+authors:
+ - name: Krrish Dholakia
+ title: CEO, LiteLLM
+ url: https://www.linkedin.com/in/krish-d/
+ image_url: https://pbs.twimg.com/profile_images/1298587542745358340/DZv3Oj-h_400x400.jpg
+ - name: Ishaan Jaffer
+ title: CTO, LiteLLM
+ url: https://www.linkedin.com/in/reffajnaahsi/
+ image_url: https://pbs.twimg.com/profile_images/1613813310264340481/lz54oEiB_400x400.jpg
+
+hide_table_of_contents: false
+---
+
+import Image from '@theme/IdealImage';
+import Tabs from '@theme/Tabs';
+import TabItem from '@theme/TabItem';
+
+
+:::warning
+
+## Known Issues
+
+The `non-root` docker image has a known issue where the UI does not load. If you use the `non-root` docker image, we recommend waiting to upgrade to this version. We will post a patch fix for this.
+
+:::
+
+## Deploy this version
+
+
+
+
+```shell showLineNumbers title="docker run litellm"
+docker run \
+-e STORE_MODEL_IN_DB=True \
+-p 4000:4000 \
+ghcr.io/berriai/litellm:v1.73.6.rc.1
+```
+
+
+
+
+The pip package is not yet available.
+
+
+
+
+---
+
+## Key Highlights
+
+
+### Claude on gemini-cli
+
+
+
+
+
+
+This release brings support for using gemini-cli with LiteLLM.
+
+You can use claude-sonnet-4, gemini-2.5-flash (Vertex AI & Google AI Studio), gpt-4.1 and any LiteLLM supported model on gemini-cli.
+
+When you use gemini-cli with LiteLLM, you get the following benefits:
+
+**Developer Benefits:**
+- Universal Model Access: Use any LiteLLM supported model (Anthropic, OpenAI, Vertex AI, Bedrock, etc.) through the gemini-cli interface.
+- Higher Rate Limits & Reliability: Load balance across multiple models and providers to avoid hitting individual provider limits, with fallbacks to ensure you get responses even if one provider fails.
+
+**Proxy Admin Benefits:**
+- Centralized Management: Control access to all models through a single LiteLLM proxy instance without giving your developers API Keys to each provider.
+- Budget Controls: Set spending limits and track costs across all gemini-cli usage.
+
+[Get Started](../../docs/tutorials/litellm_gemini_cli)
+
+
+
+### Batch API Cost Tracking
+
+
+
+
+
+v1.73.6 brings cost tracking for [LiteLLM Managed Batch API](../../docs/proxy/managed_batches) calls. Previously, cost was not tracked for Batch API calls made with LiteLLM Managed Files. Now, LiteLLM stores the status of each batch call in the DB and polls incomplete batch jobs in the background, emitting a spend log for cost tracking once the batch is complete.
+
+No new flag or change is needed on your end. Over the next few weeks, we hope to extend this to cover batch cost tracking for the Anthropic passthrough as well.
+
+
+[Get Started](../../docs/proxy/managed_batches)
+
+---
+
+## New Models / Updated Models
+
+### Pricing / Context Window Updates
+
+| Provider | Model | Context Window | Input ($/1M tokens) | Output ($/1M tokens) | Type |
+| ----------- | -------------------------------------- | -------------- | ------------------- | -------------------- | ---- |
+| Azure OpenAI | `azure/o3-pro` | 200k | $20.00 | $80.00 | New |
+| OpenRouter | `openrouter/mistralai/mistral-small-3.2-24b-instruct` | 32k | $0.1 | $0.3 | New |
+| OpenAI | `o3-deep-research` | 200k | $10.00 | $40.00 | New |
+| OpenAI | `o3-deep-research-2025-06-26` | 200k | $10.00 | $40.00 | New |
+| OpenAI | `o4-mini-deep-research` | 200k | $2.00 | $8.00 | New |
+| OpenAI | `o4-mini-deep-research-2025-06-26` | 200k | $2.00 | $8.00 | New |
+| Deepseek | `deepseek-r1` | 65k | $0.55 | $2.19 | New |
+| Deepseek | `deepseek-v3` | 65k | $0.27 | $0.07 | New |
+
+
+### Updated Models
+#### Bugs
+ - **[Sambanova](../../docs/providers/sambanova)**
+ - Handle float timestamps - [PR](https://github.com/BerriAI/litellm/pull/11971) s/o [@neubig](https://github.com/neubig)
+ - **[Azure](../../docs/providers/azure)**
+   - Support Azure authentication methods (Azure AD token, API keys) on the Responses API - [PR](https://github.com/BerriAI/litellm/pull/11941) s/o [@hsuyuming](https://github.com/hsuyuming)
+   - Map `image_url` str as a nested dict - [PR](https://github.com/BerriAI/litellm/pull/12075) s/o [@davis-featherstone](https://github.com/davis-featherstone)
+ - **[Watsonx](../../docs/providers/watsonx)**
+   - Set `model` field to None when the model is part of a custom deployment - fixes an error raised by WatsonX in those cases - [PR](https://github.com/BerriAI/litellm/pull/11854) s/o [@cbjuan](https://github.com/cbjuan)
+ - **[Perplexity](../../docs/providers/perplexity)**
+ - Support web_search_options - [PR](https://github.com/BerriAI/litellm/pull/11983)
+ - Support citation token and search queries cost calculation - [PR](https://github.com/BerriAI/litellm/pull/11938)
+ - **[Anthropic](../../docs/providers/anthropic)**
+ - Null value in usage block handling - [PR](https://github.com/BerriAI/litellm/pull/12068)
+ - **Gemini ([Google AI Studio](../../docs/providers/gemini) + [VertexAI](../../docs/providers/vertex))**
+ - Only use accepted format values (enum and datetime) - else gemini raises errors - [PR](https://github.com/BerriAI/litellm/pull/11989)
+ - Cache tools if passed alongside cached content (else gemini raises an error) - [PR](https://github.com/BerriAI/litellm/pull/11989)
+ - Json schema translation improvement: Fix unpack_def handling of nested $ref inside anyof items - [PR](https://github.com/BerriAI/litellm/pull/11964)
+ - **[Mistral](../../docs/providers/mistral)**
+ - Fix thinking prompt to match hugging face recommendation - [PR](https://github.com/BerriAI/litellm/pull/12007)
+ - Add `supports_response_schema: true` for all mistral models except codestral-mamba - [PR](https://github.com/BerriAI/litellm/pull/12024)
+ - **[Ollama](../../docs/providers/ollama)**
+ - Fix unnecessary await on embedding calls - [PR](https://github.com/BerriAI/litellm/pull/12024)
+#### Features
+ - **[Azure OpenAI](../../docs/providers/azure)**
+ - Check if o-series model supports reasoning effort (enables drop_params to work for o1 models)
+ - Assistant + tool use cost tracking - [PR](https://github.com/BerriAI/litellm/pull/12045)
+ - **[Nvidia Nim](../../docs/providers/nvidia_nim)**
+ - Add ‘response_format’ param support - [PR](https://github.com/BerriAI/litellm/pull/12003) @shagunb-acn
+ - **[ElevenLabs](../../docs/providers/elevenlabs)**
+ - New STT provider - [PR](https://github.com/BerriAI/litellm/pull/12119)
+
+---
+## LLM API Endpoints
+
+#### Features
+ - [**/mcp**](../../docs/mcp)
+ - Send appropriate auth string value to `/tool/call` endpoint with `x-mcp-auth` - [PR](https://github.com/BerriAI/litellm/pull/11968) s/o [@wagnerjt](https://github.com/wagnerjt)
+ - [**/v1/messages**](../../docs/anthropic_unified)
+ - [Custom LLM](../../docs/providers/custom_llm_server#anthropic-v1messages) support - [PR](https://github.com/BerriAI/litellm/pull/12016)
+ - [**/chat/completions**](../../docs/completion/input)
+ - Azure Responses API via chat completion support - [PR](https://github.com/BerriAI/litellm/pull/12016)
+ - [**/responses**](../../docs/response_api)
+ - Add reasoning content support for non-openai providers - [PR](https://github.com/BerriAI/litellm/pull/12055)
+ - **[NEW] /generateContent**
+ - New endpoints for gemini cli support - [PR](https://github.com/BerriAI/litellm/pull/12040)
+ - Support calling Google AI Studio / VertexAI Gemini models in their native format - [PR](https://github.com/BerriAI/litellm/pull/12046)
+ - Add logging + cost tracking for stream + non-stream vertex/google ai studio routes - [PR](https://github.com/BerriAI/litellm/pull/12058)
+ - Add Bridge from generateContent to /chat/completions - [PR](https://github.com/BerriAI/litellm/pull/12081)
+ - [**/batches**](../../docs/batches)
+ - Filter deployments to only those where managed file was written to - [PR](https://github.com/BerriAI/litellm/pull/12048)
+    - Save all model / file id mappings in the DB (previously only the first one was saved) - enables true load balancing - [PR](https://github.com/BerriAI/litellm/pull/12048)
+ - Support List Batches with target model name specified - [PR](https://github.com/BerriAI/litellm/pull/12049)
+
+---
+## Spend Tracking / Budget Improvements
+
+#### Features
+ - [**Passthrough**](../../docs/pass_through)
+ - [Bedrock](../../docs/pass_through/bedrock) - cost tracking (`/invoke` + `/converse` routes) on streaming + non-streaming - [PR](https://github.com/BerriAI/litellm/pull/12123)
+ - [VertexAI](../../docs/pass_through/vertex_ai) - anthropic cost calculation support - [PR](https://github.com/BerriAI/litellm/pull/11992)
+ - [**Batches**](../../docs/batches)
+ - Background job for cost tracking LiteLLM Managed batches - [PR](https://github.com/BerriAI/litellm/pull/12125)
+
+---
+## Management Endpoints / UI
+
+#### Bugs
+ - **General UI**
+ - Fix today selector date mutation in dashboard components - [PR](https://github.com/BerriAI/litellm/pull/12042)
+ - **Usage**
+ - Aggregate usage data across all pages of paginated endpoint - [PR](https://github.com/BerriAI/litellm/pull/12033)
+ - **Teams**
+ - De-duplicate models in team settings dropdown - [PR](https://github.com/BerriAI/litellm/pull/12074)
+ - **Models**
+    - Preserve public model name when selecting ‘test connect’ with an Azure model (previously it would reset) - [PR](https://github.com/BerriAI/litellm/pull/11713)
+ - **Invitation Links**
+ - Ensure Invite links email contain the correct invite id when using tf provider - [PR](https://github.com/BerriAI/litellm/pull/12130)
+#### Features
+ - **Models**
+ - Add ‘last success’ column to health check table - [PR](https://github.com/BerriAI/litellm/pull/11903)
+ - **MCP**
+ - New UI component to support auth types: api key, bearer token, basic auth - [PR](https://github.com/BerriAI/litellm/pull/11968) s/o [@wagnerjt](https://github.com/wagnerjt)
+ - Ensure internal users can access /mcp and /mcp/ routes - [PR](https://github.com/BerriAI/litellm/pull/12106)
+ - **SCIM**
+ - Ensure default_internal_user_params are applied for new users - [PR](https://github.com/BerriAI/litellm/pull/12015)
+ - **Team**
+ - Support default key expiry for team member keys - [PR](https://github.com/BerriAI/litellm/pull/12023)
+ - Expand team member add check to cover user email - [PR](https://github.com/BerriAI/litellm/pull/12082)
+ - **UI**
+ - Restrict UI access by SSO group - [PR](https://github.com/BerriAI/litellm/pull/12023)
+ - **Keys**
+    - Add new `new_key` param for regenerating a key - [PR](https://github.com/BerriAI/litellm/pull/12087)
+ - **Test Keys**
+    - New ‘get code’ button for generating a runnable Python code snippet based on the UI configuration - [PR](https://github.com/BerriAI/litellm/pull/11629)
+
+---
+
+## Logging / Guardrail Integrations
+
+#### Bugs
+ - **Braintrust**
+    - Add model to metadata to enable Braintrust cost estimation - [PR](https://github.com/BerriAI/litellm/pull/12022)
+#### Features
+ - **Callbacks**
+ - (Enterprise) - disable logging callbacks in request headers - [PR](https://github.com/BerriAI/litellm/pull/11985)
+ - Add List Callbacks API Endpoint - [PR](https://github.com/BerriAI/litellm/pull/11987)
+ - **Bedrock Guardrail**
+ - Don't raise exception on intervene action - [PR](https://github.com/BerriAI/litellm/pull/11875)
+    - Ensure PII masking is applied to streaming and non-streaming response content when using post-call guardrails - [PR](https://github.com/BerriAI/litellm/pull/12086)
+ - **[NEW] Palo Alto Networks Prisma AIRS Guardrail**
+ - [PR](https://github.com/BerriAI/litellm/pull/12116)
+ - **ElasticSearch**
+ - New Elasticsearch Logging Tutorial - [PR](https://github.com/BerriAI/litellm/pull/11761)
+ - **Message Redaction**
+ - Preserve usage / model information for Embedding redaction - [PR](https://github.com/BerriAI/litellm/pull/12088)
+
+---
+
+## Performance / Loadbalancing / Reliability improvements
+
+#### Bugs
+ - **Team-only models**
+ - Filter team-only models from routing logic for non-team calls
+ - **Context Window Exceeded error**
+ - Catch anthropic exceptions - [PR](https://github.com/BerriAI/litellm/pull/12113)
+#### Features
+ - **Router**
+    - Allow using a dynamic cooldown time for a specific deployment - [PR](https://github.com/BerriAI/litellm/pull/12037)
+    - Handle `cooldown_time = 0` for deployments - [PR](https://github.com/BerriAI/litellm/pull/12108)
+ - **Redis**
+ - Add better debugging to see what variables are set - [PR](https://github.com/BerriAI/litellm/pull/12073)
+
+---
+
+## General Proxy Improvements
+
+#### Bugs
+ - **aiohttp**
+    - Check `HTTP_PROXY` vars in networking requests
+    - Allow using `HTTP_PROXY` settings with `trust_env`
+
+#### Features
+ - **Docs**
+ - Add recommended spec - [PR](https://github.com/BerriAI/litellm/pull/11980)
+ - **Swagger**
+    - Introduce new environment variable NO_REDOC to opt out of Redoc - [PR](https://github.com/BerriAI/litellm/pull/12092)
+
+
+---
+
+## New Contributors
+* @mukesh-dream11 made their first contribution in https://github.com/BerriAI/litellm/pull/11969
+* @cbjuan made their first contribution in https://github.com/BerriAI/litellm/pull/11854
+* @ryan-castner made their first contribution in https://github.com/BerriAI/litellm/pull/12055
+* @davis-featherstone made their first contribution in https://github.com/BerriAI/litellm/pull/12075
+* @Gum-Joe made their first contribution in https://github.com/BerriAI/litellm/pull/12068
+* @jroberts2600 made their first contribution in https://github.com/BerriAI/litellm/pull/12116
+* @ohmeow made their first contribution in https://github.com/BerriAI/litellm/pull/12022
+* @amarrella made their first contribution in https://github.com/BerriAI/litellm/pull/11942
+* @zhangyoufu made their first contribution in https://github.com/BerriAI/litellm/pull/12092
+* @bougou made their first contribution in https://github.com/BerriAI/litellm/pull/12088
+* @codeugar made their first contribution in https://github.com/BerriAI/litellm/pull/11972
+* @glgh made their first contribution in https://github.com/BerriAI/litellm/pull/12133
+
+## **[Git Diff](https://github.com/BerriAI/litellm/compare/v1.73.0-stable...v1.73.6.rc-draft)**
diff --git a/enterprise/dist/litellm_enterprise-0.1.10-py3-none-any.whl b/enterprise/dist/litellm_enterprise-0.1.10-py3-none-any.whl
new file mode 100644
index 000000000000..473ff736e3a5
Binary files /dev/null and b/enterprise/dist/litellm_enterprise-0.1.10-py3-none-any.whl differ
diff --git a/enterprise/dist/litellm_enterprise-0.1.10.tar.gz b/enterprise/dist/litellm_enterprise-0.1.10.tar.gz
new file mode 100644
index 000000000000..e28ee65c3892
Binary files /dev/null and b/enterprise/dist/litellm_enterprise-0.1.10.tar.gz differ
diff --git a/enterprise/poetry.lock b/enterprise/poetry.lock
index f526fec8da08..bb436a168cda 100644
--- a/enterprise/poetry.lock
+++ b/enterprise/poetry.lock
@@ -1,7 +1,7 @@
-# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
+# This file is automatically @generated by Poetry 2.1.2 and should not be changed by hand.
package = []
[metadata]
-lock-version = "2.0"
+lock-version = "2.1"
python-versions = ">=3.8.1,<4.0, !=3.9.7"
content-hash = "2cf39473e67ff0615f0a61c9d2ac9f02b38cc08cbb1bdb893d89bee002646623"
diff --git a/enterprise/pyproject.toml b/enterprise/pyproject.toml
index a650eda22b15..3095245c6c73 100644
--- a/enterprise/pyproject.toml
+++ b/enterprise/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "litellm-enterprise"
-version = "0.1.9"
+version = "0.1.10"
description = "Package for LiteLLM Enterprise features"
authors = ["BerriAI"]
readme = "README.md"
@@ -22,7 +22,7 @@ requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
[tool.commitizen]
-version = "0.1.9"
+version = "0.1.10"
version_files = [
"pyproject.toml:version",
"../requirements.txt:litellm-enterprise==",
diff --git a/litellm/__init__.py b/litellm/__init__.py
index fc8b8e6f44bd..0ec8a1e01b51 100644
--- a/litellm/__init__.py
+++ b/litellm/__init__.py
@@ -61,12 +61,8 @@
DEFAULT_ALLOWED_FAILS,
)
from litellm.types.guardrails import GuardrailItem
-from litellm.proxy._types import (
- KeyManagementSystem,
- KeyManagementSettings,
- LiteLLM_UpperboundKeyGenerateParams,
-)
-from litellm.types.proxy.management_endpoints.ui_sso import DefaultTeamSSOParams
+from litellm.types.secret_managers.main import KeyManagementSystem, KeyManagementSettings
+from litellm.types.proxy.management_endpoints.ui_sso import DefaultTeamSSOParams, LiteLLM_UpperboundKeyGenerateParams
from litellm.types.utils import StandardKeyGenerationConfig, LlmProviders
from litellm.integrations.custom_logger import CustomLogger
from litellm.litellm_core_utils.logging_callback_manager import LoggingCallbackManager
@@ -76,6 +72,7 @@
litellm_mode = os.getenv("LITELLM_MODE", "DEV") # "PRODUCTION", "DEV"
if litellm_mode == "DEV":
dotenv.load_dotenv()
+
##################################################
if set_verbose == True:
_turn_on_debug()
@@ -221,6 +218,7 @@
disable_token_counter: bool = False
disable_add_transform_inline_image_block: bool = False
disable_add_user_agent_to_request_tags: bool = False
+extra_spend_tag_headers: Optional[List[str]] = None
in_memory_llm_clients_cache: LLMClientCache = LLMClientCache()
safe_memory_mode: bool = False
enable_azure_ad_token_refresh: Optional[bool] = False
@@ -323,9 +321,11 @@
use_aiohttp_transport: bool = (
True # Older variable, aiohttp is now the default. use disable_aiohttp_transport instead.
)
-aiohttp_trust_env: bool = False # set to true to use HTTP_ Proxy settings
+aiohttp_trust_env: bool = False # set to true to use HTTP_ Proxy settings
disable_aiohttp_transport: bool = False # Set this to true to use httpx instead
-disable_aiohttp_trust_env: bool = False # When False, aiohttp will respect HTTP(S)_PROXY env vars
+disable_aiohttp_trust_env: bool = (
+ False # When False, aiohttp will respect HTTP(S)_PROXY env vars
+)
force_ipv4: bool = (
False # when True, litellm will force ipv4 for all LLM requests. Some users have seen httpx ConnectionError when using ipv6.
)
@@ -1157,6 +1157,7 @@ def add_known_models():
from .files.main import *
from .scheduler import *
from .cost_calculator import response_cost_calculator, cost_per_token
+
### ADAPTERS ###
from .types.adapter import AdapterItem
import litellm.anthropic_interface as anthropic
diff --git a/litellm/_service_logger.py b/litellm/_service_logger.py
index 969a9ef14836..3128f02f4093 100644
--- a/litellm/_service_logger.py
+++ b/litellm/_service_logger.py
@@ -4,7 +4,6 @@
import litellm
from litellm._logging import verbose_logger
-from litellm.proxy._types import UserAPIKeyAuth
from .integrations.custom_logger import CustomLogger
from .integrations.datadog.datadog import DataDogLogger
@@ -15,11 +14,14 @@
if TYPE_CHECKING:
from opentelemetry.trace import Span as _Span
+ from litellm.proxy._types import UserAPIKeyAuth
+
Span = Union[_Span, Any]
OTELClass = OpenTelemetry
else:
Span = Any
OTELClass = Any
+ UserAPIKeyAuth = Any
class ServiceLogging(CustomLogger):
diff --git a/litellm/google_genai/adapters/transformation.py b/litellm/google_genai/adapters/transformation.py
index 915e436b216a..e80da47d5ea3 100644
--- a/litellm/google_genai/adapters/transformation.py
+++ b/litellm/google_genai/adapters/transformation.py
@@ -26,50 +26,53 @@ class GoogleGenAIStreamWrapper(AdapterCompletionStreamWrapper):
Wrapper for streaming Google GenAI generate_content responses.
Transforms OpenAI streaming chunks to Google GenAI format.
"""
-
+
sent_first_chunk: bool = False
# State tracking for accumulating partial tool calls
accumulated_tool_calls: Dict[str, Dict[str, Any]]
-
+
def __init__(self, completion_stream: Any):
- super().__init__(completion_stream)
self.sent_first_chunk = False
self.accumulated_tool_calls = {}
-
+
def __next__(self):
try:
for chunk in self.completion_stream:
if chunk == "None" or chunk is None:
continue
-
+
# Transform OpenAI streaming chunk to Google GenAI format
- transformed_chunk = GoogleGenAIAdapter().translate_streaming_completion_to_generate_content(chunk, self)
+ transformed_chunk = GoogleGenAIAdapter().translate_streaming_completion_to_generate_content(
+ chunk, self
+ )
if transformed_chunk: # Only return non-empty chunks
return transformed_chunk
-
+
raise StopIteration
except StopIteration:
raise StopIteration
except Exception:
raise StopIteration
-
+
async def __anext__(self):
try:
async for chunk in self.completion_stream:
if chunk == "None" or chunk is None:
continue
-
- # Transform OpenAI streaming chunk to Google GenAI format
- transformed_chunk = GoogleGenAIAdapter().translate_streaming_completion_to_generate_content(chunk, self)
+
+ # Transform OpenAI streaming chunk to Google GenAI format
+ transformed_chunk = GoogleGenAIAdapter().translate_streaming_completion_to_generate_content(
+ chunk, self
+ )
if transformed_chunk: # Only return non-empty chunks
return transformed_chunk
-
+
raise StopAsyncIteration
except StopAsyncIteration:
raise StopAsyncIteration
except Exception:
raise StopAsyncIteration
-
+
def google_genai_sse_wrapper(self) -> Iterator[bytes]:
"""
Convert Google GenAI streaming chunks to Server-Sent Events format.
@@ -80,7 +83,7 @@ def google_genai_sse_wrapper(self) -> Iterator[bytes]:
yield payload.encode()
else:
yield chunk
-
+
async def async_google_genai_sse_wrapper(self) -> AsyncIterator[bytes]:
"""
Async version of google_genai_sse_wrapper.
@@ -95,40 +98,39 @@ async def async_google_genai_sse_wrapper(self) -> AsyncIterator[bytes]:
class GoogleGenAIAdapter:
"""Adapter for transforming Google GenAI generate_content requests to/from litellm.completion format"""
-
+
def __init__(self) -> None:
pass
-
def translate_generate_content_to_completion(
self,
model: str,
contents: Union[List[Dict[str, Any]], Dict[str, Any]],
config: Optional[Dict[str, Any]] = None,
- **kwargs
+ **kwargs,
) -> ChatCompletionRequest:
"""
Transform generate_content request to litellm completion format
-
+
Args:
model: The model name
contents: Generate content contents (can be list or single dict)
config: Optional config parameters
**kwargs: Additional parameters
-
+
Returns:
ChatCompletionRequest in OpenAI format
"""
-
+
# Normalize contents to list format
if isinstance(contents, dict):
contents_list = [contents]
else:
contents_list = contents
-
+
# Transform contents to OpenAI messages format
messages = self._transform_contents_to_messages(contents_list)
-
+
# Create base request
completion_request: ChatCompletionRequest = ChatCompletionRequest(
model=model,
@@ -146,7 +148,7 @@ def translate_generate_content_to_completion(
# - tools
# - tool_choice
#########################################################
-
+
# Add config parameters if provided
if config:
# Map common Google GenAI config parameters to OpenAI equivalents
@@ -161,89 +163,91 @@ def translate_generate_content_to_completion(
pass
if "stopSequences" in config:
completion_request["stop"] = config["stopSequences"]
-
+
# Handle tools transformation
if "tools" in kwargs:
tools = kwargs["tools"]
-
+
# Check if tools are already in OpenAI format or Google GenAI format
if isinstance(tools, list) and len(tools) > 0:
# Tools are in Google GenAI format, transform them
openai_tools = self._transform_google_genai_tools_to_openai(tools)
if openai_tools:
completion_request["tools"] = openai_tools
-
+
# Handle tool_config (tool choice)
if "tool_config" in kwargs:
- tool_choice = self._transform_google_genai_tool_config_to_openai(kwargs["tool_config"])
+ tool_choice = self._transform_google_genai_tool_config_to_openai(
+ kwargs["tool_config"]
+ )
if tool_choice:
completion_request["tool_choice"] = tool_choice
-
+
return completion_request
def translate_completion_output_params_streaming(
self, completion_stream: Any
) -> Union[AsyncIterator[bytes], None]:
"""Transform streaming completion output to Google GenAI format"""
- google_genai_wrapper = GoogleGenAIStreamWrapper(completion_stream=completion_stream)
+ google_genai_wrapper = GoogleGenAIStreamWrapper(
+ completion_stream=completion_stream
+ )
# Return the SSE-wrapped version for proper event formatting
return google_genai_wrapper.async_google_genai_sse_wrapper()
- def _transform_google_genai_tools_to_openai(self, tools: List[Dict[str, Any]]) -> List[ChatCompletionToolParam]:
+ def _transform_google_genai_tools_to_openai(
+ self, tools: List[Dict[str, Any]]
+ ) -> List[ChatCompletionToolParam]:
"""Transform Google GenAI tools to OpenAI tools format"""
openai_tools: List[Dict[str, Any]] = []
-
+
for tool in tools:
if "functionDeclarations" in tool:
for func_decl in tool["functionDeclarations"]:
function_chunk: Dict[str, Any] = {
"name": func_decl.get("name", ""),
}
-
+
if "description" in func_decl:
function_chunk["description"] = func_decl["description"]
if "parameters" in func_decl:
function_chunk["parameters"] = func_decl["parameters"]
-
- openai_tool = {
- "type": "function",
- "function": function_chunk
- }
+
+ openai_tool = {"type": "function", "function": function_chunk}
openai_tools.append(openai_tool)
-
# normalize the tool schemas
normalized_tools = [normalize_tool_schema(tool) for tool in openai_tools]
-
+
return cast(List[ChatCompletionToolParam], normalized_tools)
- def _transform_google_genai_tool_config_to_openai(self, tool_config: Dict[str, Any]) -> Optional[ChatCompletionToolChoiceValues]:
+ def _transform_google_genai_tool_config_to_openai(
+ self, tool_config: Dict[str, Any]
+ ) -> Optional[ChatCompletionToolChoiceValues]:
"""Transform Google GenAI tool_config to OpenAI tool_choice"""
function_calling_config = tool_config.get("functionCallingConfig", {})
mode = function_calling_config.get("mode", "AUTO")
-
- mode_mapping = {
- "AUTO": "auto",
- "ANY": "required",
- "NONE": "none"
- }
-
+
+ mode_mapping = {"AUTO": "auto", "ANY": "required", "NONE": "none"}
+
tool_choice = mode_mapping.get(mode, "auto")
return cast(ChatCompletionToolChoiceValues, tool_choice)
- def _transform_contents_to_messages(self, contents: List[Dict[str, Any]]) -> List[AllMessageValues]:
+ def _transform_contents_to_messages(
+ self, contents: List[Dict[str, Any]]
+ ) -> List[AllMessageValues]:
"""Transform Google GenAI contents to OpenAI messages format"""
messages: List[AllMessageValues] = []
-
+
for content in contents:
role = content.get("role", "user")
parts = content.get("parts", [])
-
+
if role == "user":
# Handle user messages with potential function responses
combined_text = ""
tool_messages: List[ChatCompletionToolMessage] = []
-
+
for part in parts:
if isinstance(part, dict):
if "text" in part:
@@ -254,27 +258,26 @@ def _transform_contents_to_messages(self, contents: List[Dict[str, Any]]) -> Lis
tool_message = ChatCompletionToolMessage(
role="tool",
tool_call_id=f"call_{func_response.get('name', 'unknown')}",
- content=json.dumps(func_response.get("response", {}))
+ content=json.dumps(func_response.get("response", {})),
)
tool_messages.append(tool_message)
elif isinstance(part, str):
combined_text += part
-
+
# Add user message if there's text content
if combined_text:
- messages.append(ChatCompletionUserMessage(
- role="user",
- content=combined_text
- ))
-
+ messages.append(
+ ChatCompletionUserMessage(role="user", content=combined_text)
+ )
+
# Add tool messages
messages.extend(tool_messages)
-
+
elif role == "model":
# Handle assistant messages with potential function calls
combined_text = ""
tool_calls: List[ChatCompletionAssistantToolCall] = []
-
+
for part in parts:
if isinstance(part, dict):
if "text" in part:
@@ -287,28 +290,28 @@ def _transform_contents_to_messages(self, contents: List[Dict[str, Any]]) -> Lis
type="function",
function=ChatCompletionToolCallFunctionChunk(
name=func_call.get("name", ""),
- arguments=json.dumps(func_call.get("args", {}))
- )
+ arguments=json.dumps(func_call.get("args", {})),
+ ),
)
tool_calls.append(tool_call)
elif isinstance(part, str):
combined_text += part
-
+
# Create assistant message
if tool_calls:
assistant_message = ChatCompletionAssistantMessage(
role="assistant",
content=combined_text if combined_text else None,
- tool_calls=tool_calls
+ tool_calls=tool_calls,
)
else:
assistant_message = ChatCompletionAssistantMessage(
role="assistant",
- content=combined_text if combined_text else None
+ content=combined_text if combined_text else None,
)
-
+
messages.append(assistant_message)
-
+
return messages
def translate_completion_to_generate_content(
@@ -316,57 +319,62 @@ def translate_completion_to_generate_content(
) -> Dict[str, Any]:
"""
Transform litellm completion response to Google GenAI generate_content format
-
+
Args:
response: ModelResponse from litellm.completion
-
+
Returns:
Dict in Google GenAI generate_content response format
"""
-
+
# Extract the main response content
choice = response.choices[0] if response.choices else None
if not choice:
raise ValueError("Invalid completion response: no choices found")
-
+
# Handle different choice types (Choices vs StreamingChoices)
if isinstance(choice, Choices):
if not choice.message:
- raise ValueError("Invalid completion response: no message found in choice")
+ raise ValueError(
+ "Invalid completion response: no message found in choice"
+ )
parts = self._transform_openai_message_to_google_genai_parts(choice.message)
elif isinstance(choice, StreamingChoices):
if not choice.delta:
- raise ValueError("Invalid completion response: no delta found in streaming choice")
+ raise ValueError(
+ "Invalid completion response: no delta found in streaming choice"
+ )
parts = self._transform_openai_delta_to_google_genai_parts(choice.delta)
else:
# Fallback for generic choice objects
- message_content = getattr(choice, 'message', {}).get('content', '') or getattr(choice, 'delta', {}).get('content', '')
+ message_content = getattr(choice, "message", {}).get(
+ "content", ""
+ ) or getattr(choice, "delta", {}).get("content", "")
parts = [{"text": message_content}] if message_content else []
-
+
# Create Google GenAI format response
generate_content_response: Dict[str, Any] = {
"candidates": [
{
- "content": {
- "parts": parts,
- "role": "model"
- },
- "finishReason": self._map_finish_reason(getattr(choice, 'finish_reason', None)),
+ "content": {"parts": parts, "role": "model"},
+ "finishReason": self._map_finish_reason(
+ getattr(choice, "finish_reason", None)
+ ),
"index": 0,
- "safetyRatings": []
+ "safetyRatings": [],
}
],
"usageMetadata": (
- self._map_usage(getattr(response, 'usage', None))
- if hasattr(response, 'usage') and getattr(response, 'usage', None)
+ self._map_usage(getattr(response, "usage", None))
+ if hasattr(response, "usage") and getattr(response, "usage", None)
else {
"promptTokenCount": 0,
"candidatesTokenCount": 0,
- "totalTokenCount": 0
+ "totalTokenCount": 0,
}
- )
+ ),
}
-
+
# Add text field for convenience (common in Google GenAI responses)
text_content = ""
for part in parts:
@@ -374,7 +382,7 @@ def translate_completion_to_generate_content(
text_content += part["text"]
if text_content:
generate_content_response["text"] = text_content
-
+
return generate_content_response
def translate_streaming_completion_to_generate_content(
@@ -382,62 +390,69 @@ def translate_streaming_completion_to_generate_content(
) -> Dict[str, Any]:
"""
Transform streaming litellm completion chunk to Google GenAI generate_content format
-
+
Args:
response: Streaming ModelResponse chunk from litellm.completion
wrapper: GoogleGenAIStreamWrapper instance
-
+
Returns:
Dict in Google GenAI streaming generate_content response format
"""
-
+
# Extract the main response content from streaming chunk
choice = response.choices[0] if response.choices else None
if not choice:
# Return empty chunk if no choices
return {}
-
+
# Handle streaming choice
if isinstance(choice, StreamingChoices):
if choice.delta:
- parts = self._transform_openai_delta_to_google_genai_parts_with_accumulation(choice.delta, wrapper)
+ parts = self._transform_openai_delta_to_google_genai_parts_with_accumulation(
+ choice.delta, wrapper
+ )
else:
parts = []
- finish_reason = getattr(choice, 'finish_reason', None)
+ finish_reason = getattr(choice, "finish_reason", None)
else:
# Fallback for generic choice objects
- message_content = getattr(choice, 'delta', {}).get('content', '')
+ message_content = getattr(choice, "delta", {}).get("content", "")
parts = [{"text": message_content}] if message_content else []
- finish_reason = getattr(choice, 'finish_reason', None)
-
+ finish_reason = getattr(choice, "finish_reason", None)
+
# Only create response chunk if we have parts or it's the final chunk
if not parts and not finish_reason:
return {}
-
+
# Create Google GenAI streaming format response
streaming_chunk: Dict[str, Any] = {
"candidates": [
{
- "content": {
- "parts": parts,
- "role": "model"
- },
- "finishReason": self._map_finish_reason(finish_reason) if finish_reason else None,
+ "content": {"parts": parts, "role": "model"},
+ "finishReason": (
+ self._map_finish_reason(finish_reason)
+ if finish_reason
+ else None
+ ),
"index": 0,
- "safetyRatings": []
+ "safetyRatings": [],
}
]
}
-
+
# Add usage metadata only in the final chunk (when finish_reason is present)
if finish_reason:
- usage_metadata = self._map_usage(getattr(response, 'usage', None)) if hasattr(response, 'usage') and getattr(response, 'usage', None) else {
- "promptTokenCount": 0,
- "candidatesTokenCount": 0,
- "totalTokenCount": 0
- }
+ usage_metadata = (
+ self._map_usage(getattr(response, "usage", None))
+ if hasattr(response, "usage") and getattr(response, "usage", None)
+ else {
+ "promptTokenCount": 0,
+ "candidatesTokenCount": 0,
+ "totalTokenCount": 0,
+ }
+ )
streaming_chunk["usageMetadata"] = usage_metadata
-
+
# Add text field for convenience (common in Google GenAI responses)
text_content = ""
for part in parts:
@@ -445,64 +460,69 @@ def translate_streaming_completion_to_generate_content(
text_content += part["text"]
if text_content:
streaming_chunk["text"] = text_content
-
+
return streaming_chunk
- def _transform_openai_message_to_google_genai_parts(self, message: Any) -> List[Dict[str, Any]]:
+ def _transform_openai_message_to_google_genai_parts(
+ self, message: Any
+ ) -> List[Dict[str, Any]]:
"""Transform OpenAI message to Google GenAI parts format"""
parts: List[Dict[str, Any]] = []
-
+
# Add text content if present
- if hasattr(message, 'content') and message.content:
+ if hasattr(message, "content") and message.content:
parts.append({"text": message.content})
-
+
# Add tool calls if present
- if hasattr(message, 'tool_calls') and message.tool_calls:
+ if hasattr(message, "tool_calls") and message.tool_calls:
for tool_call in message.tool_calls:
- if hasattr(tool_call, 'function') and tool_call.function:
+ if hasattr(tool_call, "function") and tool_call.function:
try:
- args = json.loads(tool_call.function.arguments) if tool_call.function.arguments else {}
+ args = (
+ json.loads(tool_call.function.arguments)
+ if tool_call.function.arguments
+ else {}
+ )
except json.JSONDecodeError:
args = {}
-
+
function_call_part = {
- "functionCall": {
- "name": tool_call.function.name,
- "args": args
- }
+ "functionCall": {"name": tool_call.function.name, "args": args}
}
parts.append(function_call_part)
-
+
return parts if parts else [{"text": ""}]
- def _transform_openai_delta_to_google_genai_parts(self, delta: Any) -> List[Dict[str, Any]]:
+ def _transform_openai_delta_to_google_genai_parts(
+ self, delta: Any
+ ) -> List[Dict[str, Any]]:
"""Transform OpenAI delta to Google GenAI parts format for streaming"""
parts: List[Dict[str, Any]] = []
-
+
# Add text content if present
- if hasattr(delta, 'content') and delta.content:
+ if hasattr(delta, "content") and delta.content:
parts.append({"text": delta.content})
-
+
# Add tool calls if present (for streaming tool calls)
- if hasattr(delta, 'tool_calls') and delta.tool_calls:
+ if hasattr(delta, "tool_calls") and delta.tool_calls:
for tool_call in delta.tool_calls:
- if hasattr(tool_call, 'function') and tool_call.function:
+ if hasattr(tool_call, "function") and tool_call.function:
# For streaming, we might get partial function arguments
- args_str = getattr(tool_call.function, 'arguments', '') or ''
+ args_str = getattr(tool_call.function, "arguments", "") or ""
try:
args = json.loads(args_str) if args_str else {}
except json.JSONDecodeError:
# For partial JSON in streaming, return as text for now
args = {"partial": args_str}
-
+
function_call_part = {
"functionCall": {
- "name": getattr(tool_call.function, 'name', '') or '',
- "args": args
+ "name": getattr(tool_call.function, "name", "") or "",
+ "args": args,
}
}
parts.append(function_call_part)
-
+
return parts
def _transform_openai_delta_to_google_genai_parts_with_accumulation(
@@ -510,74 +530,84 @@ def _transform_openai_delta_to_google_genai_parts_with_accumulation(
) -> List[Dict[str, Any]]:
"""Transform OpenAI delta to Google GenAI parts format with tool call accumulation"""
parts: List[Dict[str, Any]] = []
-
+
# Add text content if present
- if hasattr(delta, 'content') and delta.content:
+ if hasattr(delta, "content") and delta.content:
parts.append({"text": delta.content})
-
+
# Handle tool calls with accumulation for streaming
- if hasattr(delta, 'tool_calls') and delta.tool_calls:
+ if hasattr(delta, "tool_calls") and delta.tool_calls:
for tool_call in delta.tool_calls:
- if hasattr(tool_call, 'function') and tool_call.function:
- tool_call_id = getattr(tool_call, 'id', '') or 'call_unknown'
- function_name = getattr(tool_call.function, 'name', '') or ''
- args_str = getattr(tool_call.function, 'arguments', '') or ''
-
+ if hasattr(tool_call, "function") and tool_call.function:
+ tool_call_id = getattr(tool_call, "id", "") or "call_unknown"
+ function_name = getattr(tool_call.function, "name", "") or ""
+ args_str = getattr(tool_call.function, "arguments", "") or ""
+
# Initialize accumulation for this tool call if not exists
if tool_call_id not in wrapper.accumulated_tool_calls:
wrapper.accumulated_tool_calls[tool_call_id] = {
- 'name': '',
- 'arguments': '',
- 'complete': False
+ "name": "",
+ "arguments": "",
+ "complete": False,
}
-
+
# Accumulate function name if provided
if function_name:
- wrapper.accumulated_tool_calls[tool_call_id]['name'] = function_name
-
+ wrapper.accumulated_tool_calls[tool_call_id][
+ "name"
+ ] = function_name
+
# Accumulate arguments if provided
if args_str:
- wrapper.accumulated_tool_calls[tool_call_id]['arguments'] += args_str
-
+ wrapper.accumulated_tool_calls[tool_call_id][
+ "arguments"
+ ] += args_str
+
# Try to parse the accumulated arguments as JSON
- accumulated_args = wrapper.accumulated_tool_calls[tool_call_id]['arguments']
+ accumulated_args = wrapper.accumulated_tool_calls[tool_call_id][
+ "arguments"
+ ]
try:
if accumulated_args:
parsed_args = json.loads(accumulated_args)
# JSON is valid, mark as complete and create function call part
- wrapper.accumulated_tool_calls[tool_call_id]['complete'] = True
-
+ wrapper.accumulated_tool_calls[tool_call_id][
+ "complete"
+ ] = True
+
function_call_part = {
"functionCall": {
- "name": wrapper.accumulated_tool_calls[tool_call_id]['name'],
- "args": parsed_args
+ "name": wrapper.accumulated_tool_calls[
+ tool_call_id
+ ]["name"],
+ "args": parsed_args,
}
}
parts.append(function_call_part)
-
+
# Clean up completed tool call
del wrapper.accumulated_tool_calls[tool_call_id]
-
+
except json.JSONDecodeError:
# JSON is still incomplete, continue accumulating
# Don't add to parts yet
pass
-
+
return parts
def _map_finish_reason(self, finish_reason: Optional[str]) -> str:
"""Map OpenAI finish reasons to Google GenAI finish reasons"""
if not finish_reason:
return "STOP"
-
+
mapping = {
"stop": "STOP",
- "length": "MAX_TOKENS",
+ "length": "MAX_TOKENS",
"content_filter": "SAFETY",
"tool_calls": "STOP",
"function_call": "STOP",
}
-
+
return mapping.get(finish_reason, "STOP")
def _map_usage(self, usage: Any) -> Dict[str, int]:
@@ -586,4 +616,4 @@ def _map_usage(self, usage: Any) -> Dict[str, int]:
"promptTokenCount": getattr(usage, "prompt_tokens", 0) or 0,
"candidatesTokenCount": getattr(usage, "completion_tokens", 0) or 0,
"totalTokenCount": getattr(usage, "total_tokens", 0) or 0,
- }
\ No newline at end of file
+ }
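The streaming transform above keeps a per-tool-call buffer (`wrapper.accumulated_tool_calls`) and only emits a `functionCall` part once the concatenated argument fragments parse as valid JSON. A minimal, self-contained sketch of that buffering pattern; the `accumulated` dict and `emit_part` helper are illustrative names, not part of the diff:

```python
import json
from typing import Any, Dict, Optional

# Illustrative buffer keyed by tool_call_id, mirroring wrapper.accumulated_tool_calls
accumulated: Dict[str, Dict[str, str]] = {}


def emit_part(tool_call_id: str, name: str, args_fragment: str) -> Optional[Dict[str, Any]]:
    """Accumulate argument fragments; emit a functionCall part once the JSON completes."""
    entry = accumulated.setdefault(tool_call_id, {"name": "", "arguments": ""})
    if name:
        entry["name"] = name
    entry["arguments"] += args_fragment
    try:
        parsed = json.loads(entry["arguments"])
    except json.JSONDecodeError:
        return None  # arguments still incomplete, keep buffering
    del accumulated[tool_call_id]  # complete: clean up, as the wrapper does
    return {"functionCall": {"name": entry["name"], "args": parsed}}


# Simulated streaming deltas where the arguments arrive split across two chunks
print(emit_part("call_1", "get_weather", '{"city": "Par'))  # None (incomplete JSON)
print(emit_part("call_1", "", 'is"}'))                      # completed functionCall part
```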
diff --git a/litellm/integrations/custom_logger.py b/litellm/integrations/custom_logger.py
index ce97b9a292d1..1cbcd360ce99 100644
--- a/litellm/integrations/custom_logger.py
+++ b/litellm/integrations/custom_logger.py
@@ -16,7 +16,6 @@
from pydantic import BaseModel
from litellm.caching.caching import DualCache
-from litellm.proxy._types import UserAPIKeyAuth
from litellm.types.integrations.argilla import ArgillaItem
from litellm.types.llms.openai import AllMessageValues, ChatCompletionRequest
from litellm.types.utils import (
@@ -33,11 +32,13 @@
from opentelemetry.trace import Span as _Span
from litellm.litellm_core_utils.litellm_logging import Logging as LiteLLMLoggingObj
+ from litellm.proxy._types import UserAPIKeyAuth
Span = Union[_Span, Any]
else:
Span = Any
LiteLLMLoggingObj = Any
+ UserAPIKeyAuth = Any
class CustomLogger: # https://docs.litellm.ai/docs/observability/custom_callback#callback-class
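Moving the `UserAPIKeyAuth` import under `TYPE_CHECKING` (with an `Any` fallback at runtime) keeps the annotation visible to type checkers while breaking the runtime import cycle between `custom_logger` and `litellm.proxy._types`. A minimal sketch of the pattern; `ExampleCustomLogger` and its method are illustrative, not LiteLLM's actual hook signature:

```python
from typing import TYPE_CHECKING, Any

if TYPE_CHECKING:
    # Evaluated only by static type checkers (mypy, pyright); never executed at
    # runtime, so the circular import with litellm.proxy._types is avoided.
    from litellm.proxy._types import UserAPIKeyAuth
else:
    # At runtime the name still has to resolve, so fall back to Any,
    # exactly as the diff does.
    UserAPIKeyAuth = Any


class ExampleCustomLogger:
    # The string (forward-reference) annotation is not evaluated at class
    # definition time, so the real class is never needed here at runtime.
    def log_request(self, user_api_key_dict: "UserAPIKeyAuth") -> None:
        print(type(user_api_key_dict).__name__)


print(UserAPIKeyAuth)  # typing.Any at runtime; the real class for type checkers
```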
diff --git a/litellm/litellm_core_utils/litellm_logging.py b/litellm/litellm_core_utils/litellm_logging.py
index ddbbce96462d..f7aa59db973b 100644
--- a/litellm/litellm_core_utils/litellm_logging.py
+++ b/litellm/litellm_core_utils/litellm_logging.py
@@ -116,7 +116,6 @@
from ..integrations.arize.arize_phoenix import ArizePhoenixLogger
from ..integrations.athina import AthinaLogger
from ..integrations.azure_storage.azure_storage import AzureBlobStorageLogger
-from ..integrations.braintrust_logging import BraintrustLogger
from ..integrations.custom_prompt_management import CustomPromptManagement
from ..integrations.datadog.datadog import DataDogLogger
from ..integrations.datadog.datadog_llm_obs import DataDogLLMObsLogger
@@ -144,7 +143,6 @@
from ..integrations.s3_v2 import S3Logger as S3V2Logger
from ..integrations.supabase import Supabase
from ..integrations.traceloop import TraceloopLogger
-from ..integrations.weights_biases import WeightsBiasesLogger
from .exception_mapping_utils import _get_response_headers
from .initialize_dynamic_callback_params import (
initialize_standard_callback_dynamic_params as _initialize_standard_callback_dynamic_params,
@@ -3022,6 +3020,7 @@ def set_callbacks(callback_list, function_id=None): # noqa: PLR0915
elif callback == "s3":
s3Logger = S3Logger()
elif callback == "wandb":
+ from litellm.integrations.weights_biases import WeightsBiasesLogger
weightsBiasesLogger = WeightsBiasesLogger()
elif callback == "logfire":
logfireLogger = LogfireLogger()
@@ -3075,6 +3074,7 @@ def _init_custom_logger_compatible_class( # noqa: PLR0915
_in_memory_loggers.append(_openmeter_logger)
return _openmeter_logger # type: ignore
elif logging_integration == "braintrust":
+ from litellm.integrations.braintrust_logging import BraintrustLogger
for callback in _in_memory_loggers:
if isinstance(callback, BraintrustLogger):
return callback # type: ignore
@@ -3432,6 +3432,7 @@ def get_custom_logger_compatible_class( # noqa: PLR0915
if isinstance(callback, OpenMeterLogger):
return callback
elif logging_integration == "braintrust":
+ from litellm.integrations.braintrust_logging import BraintrustLogger
for callback in _in_memory_loggers:
if isinstance(callback, BraintrustLogger):
return callback
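Pushing the `WeightsBiasesLogger` and `BraintrustLogger` imports down into the branches that use them means `litellm_logging` no longer loads those modules (or their optional dependencies) just to be imported. The general shape of the deferred-import pattern, as a small runnable sketch in which `difflib` stands in for a heavy optional dependency such as wandb:

```python
import sys


def init_callback(name: str):
    """Import heavy or optional modules only inside the branch that needs them."""
    if name == "wandb":
        # Deferred import: executed only when this callback is actually configured,
        # so importing the logging module stays fast and does not require the
        # optional package to be installed.
        import difflib as heavy_integration  # stand-in for the real integration

        return heavy_integration
    raise ValueError(f"unsupported callback: {name}")


print("stand-in loaded before call:", "difflib" in sys.modules)
init_callback("wandb")
print("stand-in loaded after call:", "difflib" in sys.modules)
```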
@@ -4023,6 +4024,27 @@ def _get_user_agent_tags(proxy_server_request: dict) -> Optional[List[str]]:
user_agent_tags.append("User-Agent: " + user_agent)
return user_agent_tags
+ @staticmethod
+ def _get_extra_header_tags(proxy_server_request: dict) -> Optional[List[str]]:
+ """
+ Extract additional header tags for spend tracking based on config.
+ """
+ extra_headers: List[str] = litellm.extra_spend_tag_headers or []
+ if not extra_headers:
+ return None
+
+ headers = proxy_server_request.get("headers", {})
+ if not isinstance(headers, dict):
+ return None
+
+ header_tags = []
+ for header_name in extra_headers:
+ header_value = headers.get(header_name)
+ if header_value:
+ header_tags.append(f"{header_name}: {header_value}")
+
+ return header_tags if header_tags else None
+
@staticmethod
def _get_request_tags(metadata: dict, proxy_server_request: dict) -> List[str]:
request_tags = (
@@ -4033,8 +4055,13 @@ def _get_request_tags(metadata: dict, proxy_server_request: dict) -> List[str]:
user_agent_tags = StandardLoggingPayloadSetup._get_user_agent_tags(
proxy_server_request
)
+ additional_header_tags = StandardLoggingPayloadSetup._get_extra_header_tags(
+ proxy_server_request
+ )
if user_agent_tags is not None:
request_tags.extend(user_agent_tags)
+ if additional_header_tags is not None:
+ request_tags.extend(additional_header_tags)
return request_tags
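With `_get_extra_header_tags`, every request header listed in `litellm.extra_spend_tag_headers` is emitted as a `"<header>: <value>"` spend tag next to the existing `User-Agent` tag. A rough sketch of the resulting behavior; the module-level list here stands in for however `litellm.extra_spend_tag_headers` is populated in your setup:

```python
from typing import List, Optional

# Stand-in for litellm.extra_spend_tag_headers
extra_spend_tag_headers: List[str] = ["x-team-id", "x-feature"]


def get_extra_header_tags(proxy_server_request: dict) -> Optional[List[str]]:
    """Mirror of the new helper: emit '<header>: <value>' tags for configured headers."""
    headers = proxy_server_request.get("headers", {})
    if not isinstance(headers, dict):
        return None
    tags = [
        f"{name}: {headers[name]}"
        for name in extra_spend_tag_headers
        if headers.get(name)
    ]
    return tags or None


request = {"headers": {"x-team-id": "growth", "user-agent": "claude-code"}}
print(get_extra_header_tags(request))  # ['x-team-id: growth']
```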
diff --git a/litellm/litellm_core_utils/llm_cost_calc/tool_call_cost_tracking.py b/litellm/litellm_core_utils/llm_cost_calc/tool_call_cost_tracking.py
index 4ecc4a337020..75bb699292e0 100644
--- a/litellm/litellm_core_utils/llm_cost_calc/tool_call_cost_tracking.py
+++ b/litellm/litellm_core_utils/llm_cost_calc/tool_call_cost_tracking.py
@@ -248,7 +248,7 @@ def _get_code_interpreter_cost(
)
@staticmethod
- def _extract_token_counts(computer_use_usage: Any) -> tuple[Optional[int], Optional[int]]:
+ def _extract_token_counts(computer_use_usage: Any) -> Tuple[Optional[int], Optional[int]]:
"""Extract and convert token counts safely."""
input_tokens = None
output_tokens = None
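The `tuple[...]` to `Tuple[...]` change is a compatibility fix: subscripting the builtin `tuple` in an annotation that gets evaluated raises `TypeError` on Python 3.8, while `typing.Tuple` works on every supported version. A tiny sketch of the corrected signature shape (the `fake_usage` object is fabricated for the demo):

```python
from typing import Optional, Tuple


def extract_token_counts(usage: object) -> Tuple[Optional[int], Optional[int]]:
    """Same annotation shape as the fixed helper.

    On Python 3.8, `-> tuple[Optional[int], Optional[int]]` would raise
    `TypeError: 'type' object is not subscriptable` at definition time
    (unless `from __future__ import annotations` is in effect).
    """
    input_tokens = getattr(usage, "input_tokens", None)
    output_tokens = getattr(usage, "output_tokens", None)
    return input_tokens, output_tokens


fake_usage = type("FakeUsage", (), {"input_tokens": 3, "output_tokens": 7})()
print(extract_token_counts(fake_usage))  # (3, 7)
```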
diff --git a/litellm/litellm_core_utils/streaming_chunk_builder_utils.py b/litellm/litellm_core_utils/streaming_chunk_builder_utils.py
index 4068d2e043cd..0517d27e299b 100644
--- a/litellm/litellm_core_utils/streaming_chunk_builder_utils.py
+++ b/litellm/litellm_core_utils/streaming_chunk_builder_utils.py
@@ -107,9 +107,9 @@ def get_combined_tool_content(
self, tool_call_chunks: List[Dict[str, Any]]
) -> List[ChatCompletionMessageToolCall]:
tool_calls_list: List[ChatCompletionMessageToolCall] = []
- tool_call_map: Dict[
- int, Dict[str, Any]
- ] = {} # Map to store tool calls by index
+ tool_call_map: Dict[int, Dict[str, Any]] = (
+ {}
+ ) # Map to store tool calls by index
for chunk in tool_call_chunks:
choices = chunk["choices"]
@@ -415,6 +415,8 @@ def calculate_usage(
if prompt_tokens_details is not None:
returned_usage.prompt_tokens_details = prompt_tokens_details
+ # Return a new usage object with the new values
+ returned_usage = Usage(**returned_usage.model_dump())
return returned_usage
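Round-tripping `returned_usage` through `Usage(**returned_usage.model_dump())` hands back a freshly constructed object carrying exactly the field values accumulated above, rather than the instance that was mutated in place while combining chunks. A minimal illustration with a stand-in Pydantic model; the real `Usage` type lives in `litellm.types.utils`:

```python
from typing import Optional

from pydantic import BaseModel


class UsageSketch(BaseModel):
    """Stand-in for litellm.types.utils.Usage, for illustration only."""

    prompt_tokens: int = 0
    completion_tokens: int = 0
    total_tokens: int = 0
    cache_read_input_tokens: Optional[int] = None


mutated = UsageSketch(prompt_tokens=10, completion_tokens=5, total_tokens=15)
mutated.cache_read_input_tokens = 4  # value filled in while combining stream chunks

# Same trick as the diff: dump to a dict and rebuild, yielding a clean copy
# that holds exactly the values set above.
rebuilt = UsageSketch(**mutated.model_dump())

assert rebuilt == mutated and rebuilt is not mutated
print(rebuilt)
```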
diff --git a/litellm/litellm_core_utils/streaming_handler.py b/litellm/litellm_core_utils/streaming_handler.py
index 98fb94922f32..eeccfe533356 100644
--- a/litellm/litellm_core_utils/streaming_handler.py
+++ b/litellm/litellm_core_utils/streaming_handler.py
@@ -758,6 +758,7 @@ def return_processed_chunk_logic( # noqa
is_chunk_non_empty = self.is_chunk_non_empty(
completion_obj, model_response, response_obj
)
+
if (
is_chunk_non_empty
): # cannot set content of an OpenAI Object to be an empty string
@@ -1203,6 +1204,7 @@ def chunk_creator(self, chunk: Any): # type: ignore # noqa: PLR0915
if response_obj is None:
return
completion_obj["content"] = response_obj["text"]
+ self.received_finish_reason = response_obj.get("finish_reason", None)
if response_obj["is_finished"]:
if response_obj["finish_reason"] == "error":
raise Exception(
@@ -1210,7 +1212,6 @@ def chunk_creator(self, chunk: Any): # type: ignore # noqa: PLR0915
self.custom_llm_provider, response_obj
)
)
- self.received_finish_reason = response_obj["finish_reason"]
if response_obj.get("original_chunk", None) is not None:
if hasattr(response_obj["original_chunk"], "id"):
model_response = self.set_model_id(
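Recording `self.received_finish_reason` from `response_obj.get("finish_reason", None)` ahead of the `is_finished` branch captures the reason from any parsed chunk that carries one, not only from chunks the provider also flags as finished. A toy comparison of the two orderings; the chunk dicts are illustrative:

```python
from typing import Any, Dict, List, Optional

parsed_chunks: List[Dict[str, Any]] = [
    {"text": "Hello", "is_finished": False, "finish_reason": None},
    # Some providers report the finish reason on a chunk that is not marked finished.
    {"text": " world", "is_finished": False, "finish_reason": "stop"},
]

old_value: Optional[str] = None
new_value: Optional[str] = None

for response_obj in parsed_chunks:
    # New ordering (as in the diff): read the reason from every parsed chunk.
    new_value = response_obj.get("finish_reason", None)
    # Old ordering: only read it inside the is_finished branch.
    if response_obj["is_finished"]:
        old_value = response_obj["finish_reason"]

print(old_value, new_value)  # None stop
```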
diff --git a/litellm/llms/anthropic/experimental_pass_through/adapters/handler.py b/litellm/llms/anthropic/experimental_pass_through/adapters/handler.py
index 3c85f5327e88..fad895a6253a 100644
--- a/litellm/llms/anthropic/experimental_pass_through/adapters/handler.py
+++ b/litellm/llms/anthropic/experimental_pass_through/adapters/handler.py
@@ -153,7 +153,8 @@ async def async_anthropic_messages_handler(
if stream:
transformed_stream = (
ANTHROPIC_ADAPTER.translate_completion_output_params_streaming(
- completion_response
+ completion_response,
+ model=model,
)
)
if transformed_stream is not None:
@@ -239,7 +240,8 @@ def anthropic_messages_handler(
if stream:
transformed_stream = (
ANTHROPIC_ADAPTER.translate_completion_output_params_streaming(
- completion_response
+ completion_response,
+ model=model,
)
)
if transformed_stream is not None:
diff --git a/litellm/llms/anthropic/experimental_pass_through/adapters/streaming_iterator.py b/litellm/llms/anthropic/experimental_pass_through/adapters/streaming_iterator.py
index 070069988c2f..662c42f0d7e0 100644
--- a/litellm/llms/anthropic/experimental_pass_through/adapters/streaming_iterator.py
+++ b/litellm/llms/anthropic/experimental_pass_through/adapters/streaming_iterator.py
@@ -2,6 +2,7 @@
## Translates OpenAI call to Anthropic `/v1/messages` format
import json
import traceback
+import uuid
from typing import Any, AsyncIterator, Iterator, Optional
from litellm import verbose_logger
@@ -16,6 +17,10 @@ class AnthropicStreamWrapper(AdapterCompletionStreamWrapper):
- finish_reason must map exactly to anthropic reason, else anthropic client won't be able to parse it.
"""
+ def __init__(self, completion_stream: Any, model: str):
+ super().__init__(completion_stream)
+ self.model = model
+
sent_first_chunk: bool = False
sent_content_block_start: bool = False
sent_content_block_finish: bool = False
@@ -31,11 +36,11 @@ def __next__(self):
return {
"type": "message_start",
"message": {
- "id": "msg_1nZdL29xx5MUA1yADyHTEsnR8uuvGzszyY",
+ "id": "msg_{}".format(uuid.uuid4()),
"type": "message",
"role": "assistant",
"content": [],
- "model": "claude-3-5-sonnet-20240620",
+ "model": self.model,
"stop_reason": None,
"stop_sequence": None,
"usage": UsageDelta(input_tokens=0, output_tokens=0),
@@ -100,11 +105,11 @@ async def __anext__(self):
return {
"type": "message_start",
"message": {
- "id": "msg_1nZdL29xx5MUA1yADyHTEsnR8uuvGzszyY",
+ "id": "msg_{}".format(uuid.uuid4()),
"type": "message",
"role": "assistant",
"content": [],
- "model": "claude-3-5-sonnet-20240620",
+ "model": self.model,
"stop_reason": None,
"stop_sequence": None,
"usage": UsageDelta(input_tokens=0, output_tokens=0),
diff --git a/litellm/llms/anthropic/experimental_pass_through/adapters/transformation.py b/litellm/llms/anthropic/experimental_pass_through/adapters/transformation.py
index 0cddb65ddc41..369c668234f6 100644
--- a/litellm/llms/anthropic/experimental_pass_through/adapters/transformation.py
+++ b/litellm/llms/anthropic/experimental_pass_through/adapters/transformation.py
@@ -96,9 +96,11 @@ def translate_completion_output_params(
)
def translate_completion_output_params_streaming(
- self, completion_stream: Any
+ self, completion_stream: Any, model: str
) -> Union[AsyncIterator[bytes], None]:
- anthropic_wrapper = AnthropicStreamWrapper(completion_stream=completion_stream)
+ anthropic_wrapper = AnthropicStreamWrapper(
+ completion_stream=completion_stream, model=model
+ )
# Return the SSE-wrapped version for proper event formatting
return anthropic_wrapper.async_anthropic_sse_wrapper()
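Threading `model` into `AnthropicStreamWrapper` (and generating a fresh `msg_` id with `uuid`) means the emitted `message_start` event now reflects the model actually being proxied instead of a hardcoded Claude id. A stripped-down sketch of how that first event is built; the SSE plumbing and the `UsageDelta` type are omitted, with a plain dict in place of the usage object:

```python
import uuid
from typing import Any, Dict


def build_message_start(model: str) -> Dict[str, Any]:
    """Shape of the first Anthropic-style streaming event, per the updated wrapper."""
    return {
        "type": "message_start",
        "message": {
            "id": "msg_{}".format(uuid.uuid4()),  # unique per stream, no fixed id
            "type": "message",
            "role": "assistant",
            "content": [],
            "model": model,  # passed down from the handler, not a constant
            "stop_reason": None,
            "stop_sequence": None,
            "usage": {"input_tokens": 0, "output_tokens": 0},
        },
    }


print(build_message_start("my-proxied-model")["message"]["model"])  # my-proxied-model
```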
diff --git a/litellm/llms/azure/chat/o_series_transformation.py b/litellm/llms/azure/chat/o_series_transformation.py
index 767f2d46df3d..778ec5f6deae 100644
--- a/litellm/llms/azure/chat/o_series_transformation.py
+++ b/litellm/llms/azure/chat/o_series_transformation.py
@@ -38,13 +38,38 @@ def get_supported_openai_params(self, model: str) -> list:
"top_logprobs",
]
- o_series_only_param = []
- if supports_reasoning(model):
- o_series_only_param.append("reasoning_effort")
+ o_series_only_param = self._get_o_series_only_params(model)
+
all_openai_params.extend(o_series_only_param)
return [
param for param in all_openai_params if param not in non_supported_params
]
+
+ def _get_o_series_only_params(self, model: str) -> list:
+ """
+ Helper function to get the o-series only params for the model
+
+ - reasoning_effort
+ """
+ o_series_only_param = []
+
+ #########################################################
+ # Case 1: If the model is recognized and in litellm model cost map
+ # then check if it supports reasoning
+ #########################################################
+ if model in litellm.model_list_set:
+ if supports_reasoning(model):
+ o_series_only_param.append("reasoning_effort")
+ #########################################################
+ # Case 2: If the model is not recognized, then we assume it supports reasoning
+ # This is critical because several users tend to use custom deployment names
+ # for azure o-series models.
+ #########################################################
+ else:
+ o_series_only_param.append("reasoning_effort")
+
+ return o_series_only_param
def should_fake_stream(
self,
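The new `_get_o_series_only_params` helper only consults `supports_reasoning()` when the model name is actually present in litellm's model map; unknown names (typically custom Azure deployment aliases) are assumed to support reasoning so `reasoning_effort` is not silently dropped. A standalone sketch of that decision, with `known_models` and `supports_reasoning` as stand-ins for the real cost map and helper:

```python
from typing import List, Set

# Stand-ins for litellm.model_list_set and litellm's supports_reasoning helper
known_models: Set[str] = {"o3-mini", "gpt-4o"}
reasoning_models: Set[str] = {"o3-mini"}


def supports_reasoning(model: str) -> bool:
    return model in reasoning_models


def get_o_series_only_params(model: str) -> List[str]:
    """Mirror of the gating logic: trust the model map when possible, assume reasoning otherwise."""
    params: List[str] = []
    if model in known_models:
        if supports_reasoning(model):
            params.append("reasoning_effort")
    else:
        # Unrecognized name, e.g. a custom Azure deployment alias for an o-series
        # model: assume reasoning support.
        params.append("reasoning_effort")
    return params


print(get_o_series_only_params("o3-mini"))           # ['reasoning_effort']
print(get_o_series_only_params("gpt-4o"))            # []
print(get_o_series_only_params("my-o3-deployment"))  # ['reasoning_effort']
```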
diff --git a/litellm/llms/base_llm/anthropic_messages/transformation.py b/litellm/llms/base_llm/anthropic_messages/transformation.py
index d07285800768..63f4f2300342 100644
--- a/litellm/llms/base_llm/anthropic_messages/transformation.py
+++ b/litellm/llms/base_llm/anthropic_messages/transformation.py
@@ -115,6 +115,7 @@ def get_error_class(
self, error_message: str, status_code: int, headers: Union[dict, httpx.Headers]
) -> "BaseLLMException":
from litellm.llms.base_llm.chat.transformation import BaseLLMException
+
return BaseLLMException(
message=error_message, status_code=status_code, headers=headers
)
diff --git a/litellm/llms/vertex_ai/gemini/vertex_and_google_ai_studio_gemini.py b/litellm/llms/vertex_ai/gemini/vertex_and_google_ai_studio_gemini.py
index 20cf076d4158..bc8844bdcb0f 100644
--- a/litellm/llms/vertex_ai/gemini/vertex_and_google_ai_studio_gemini.py
+++ b/litellm/llms/vertex_ai/gemini/vertex_and_google_ai_studio_gemini.py
@@ -1077,8 +1077,10 @@ def _check_finish_reason(
elif (
finish_reason and finish_reason in mapped_finish_reason.keys()
): # vertex ai
+
return mapped_finish_reason[finish_reason]
else:
+
return "stop"
@staticmethod
@@ -1261,7 +1263,6 @@ def transform_response(
status_code=422,
headers=raw_response.headers,
)
-
return self._transform_google_generate_content_to_openai_model_response(
completion_response=completion_response,
@@ -1270,7 +1271,6 @@ def transform_response(
logging_obj=logging_obj,
raw_response=raw_response,
)
-
def _transform_google_generate_content_to_openai_model_response(
self,
diff --git a/litellm/proxy/_experimental/out/_next/static/chunks/250-b776bd9ac8911291.js b/litellm/proxy/_experimental/out/_next/static/chunks/250-b776bd9ac8911291.js
index f0438132c4c0..adac15afa67a 100644
--- a/litellm/proxy/_experimental/out/_next/static/chunks/250-b776bd9ac8911291.js
+++ b/litellm/proxy/_experimental/out/_next/static/chunks/250-b776bd9ac8911291.js
@@ -1 +1 @@
-"use strict";(self.webpackChunk_N_E=self.webpackChunk_N_E||[]).push([[250],{19250:function(e,t,o){o.d(t,{$D:function(){return eJ},$I:function(){return Q},$o:function(){return tF},AZ:function(){return Y},Au:function(){return e_},BL:function(){return eZ},Br:function(){return b},Bw:function(){return tx},E9:function(){return eY},EB:function(){return th},EG:function(){return eW},EY:function(){return e0},Eb:function(){return N},FC:function(){return eh},Gh:function(){return eG},H1:function(){return A},H2:function(){return r},Hx:function(){return ej},I1:function(){return S},It:function(){return O},J$:function(){return ec},JO:function(){return B},K8:function(){return h},K_:function(){return eQ},Ko:function(){return tN},LY:function(){return eV},Lp:function(){return eR},MO:function(){return p},Mx:function(){return tf},N3:function(){return eb},N8:function(){return ea},NL:function(){return e9},NV:function(){return m},Nc:function(){return eP},Nz:function(){return e1},O3:function(){return eD},OD:function(){return eC},OU:function(){return eu},Of:function(){return F},Og:function(){return y},Ou:function(){return ty},Ov:function(){return C},Oz:function(){return tO},PC:function(){return e6},PT:function(){return K},PY:function(){return tv},Pj:function(){return tt},Pv:function(){return tm},Qg:function(){return ex},RQ:function(){return j},Rg:function(){return et},Sb:function(){return ez},So:function(){return er},TF:function(){return tu},Tj:function(){return e2},Tx:function(){return tB},U8:function(){return te},UM:function(){return tl},VA:function(){return I},Vt:function(){return eX},W_:function(){return M},X:function(){return es},XB:function(){return tg},XO:function(){return T},Xd:function(){return eS},Xm:function(){return x},YU:function(){return eH},Yi:function(){return tj},Yo:function(){return U},Z9:function(){return V},Zr:function(){return k},a6:function(){return P},aC:function(){return tw},ao:function(){return eK},b1:function(){return ew},cq:function(){return R},cu:function(){return eA},e2:function(){return eT},eH:function(){return W},eW:function(){return tC},eZ:function(){return eO},fE:function(){return td},fP:function(){return eo},fk:function(){return tS},g:function(){return e4},gX:function(){return eB},gl:function(){return to},h3:function(){return ed},hT:function(){return ev},hy:function(){return f},ix:function(){return X},j2:function(){return ei},jA:function(){return e$},jE:function(){return eM},jr:function(){return tT},kK:function(){return g},kn:function(){return $},lP:function(){return w},lU:function(){return tr},lg:function(){return eN},mC:function(){return ti},mR:function(){return en},mY:function(){return tc},m_:function(){return D},mp:function(){return eq},n$:function(){return em},n9:function(){return ts},nJ:function(){return tb},nd:function(){return e7},o6:function(){return ee},oC:function(){return eF},ol:function(){return L},pf:function(){return eL},pu:function(){return tk},qI:function(){return _},qW:function(){return t_},qd:function(){return tE},qk:function(){return e8},qm:function(){return u},r1:function(){return tp},r6:function(){return G},rs:function(){return v},s0:function(){return Z},sN:function(){return eI},t$:function(){return J},t0:function(){return eE},t3:function(){return e5},tB:function(){return tn},tN:function(){return ep},u5:function(){return el},v9:function(){return ek},vh:function(){return eU},wX:function(){return E},wd:function(){return eg},xA:function(){return ey},xO:function(){return e3},xX:function(){return z},xZ:function(){return ta},zX:function(){return 
c},zg:function(){return ef}});var a=o(41021);let r=null;console.log=function(){};let n=function(e){let t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:null,o=window.location.origin,a=t||o;console.log("proxyBaseUrl:",r),console.log("serverRootPath:",e),e.length>0&&!a.endsWith(e)&&"/"!=e&&(a+=e,r=a),console.log("Updated proxyBaseUrl:",r)},c=()=>r||window.location.origin,s={GET:"GET",DELETE:"DELETE"},i=0,l=async e=>{let t=Date.now();t-i>6e4?(e.includes("Authentication Error - Expired Key")&&(a.ZP.info("UI Session Expired. Logging out."),i=t,document.cookie="token=; expires=Thu, 01 Jan 1970 00:00:00 UTC; path=/;",window.location.href=window.location.pathname),i=t):console.log("Error suppressed to prevent spam:",e)},d="Authorization";function h(){let e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:"Authorization";console.log("setGlobalLitellmHeaderName: ".concat(e)),d=e}let p=async()=>{console.log("Getting UI config");let e=await fetch("/litellm/.well-known/litellm-ui-config"),t=await e.json();return console.log("jsonData in getUiConfig:",t),n(t.server_root_path,t.proxy_base_url),t},w=async()=>{let e=r?"".concat(r,"/openapi.json"):"/openapi.json",t=await fetch(e);return await t.json()},u=async e=>{try{let t=r?"".concat(r,"/get/litellm_model_cost_map"):"/get/litellm_model_cost_map",o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}}),a=await o.json();return console.log("received litellm model cost data: ".concat(a)),a}catch(e){throw console.error("Failed to get model cost map:",e),e}},g=async(e,t)=>{try{let o=r?"".concat(r,"/model/new"):"/model/new",n=await fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({...t})});if(!n.ok){let e=await n.text()||"Network response was not ok";throw a.ZP.error(e),Error(e)}let c=await n.json();return console.log("API Response:",c),a.ZP.destroy(),a.ZP.success("Model ".concat(t.model_name," created successfully"),2),c}catch(e){throw console.error("Failed to create key:",e),e}},f=async e=>{try{let t=r?"".concat(r,"/model/settings"):"/model/settings",o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Network response was not ok")}return await o.json()}catch(e){console.error("Failed to get model settings:",e)}},y=async(e,t)=>{console.log("model_id in model delete call: ".concat(t));try{let o=r?"".concat(r,"/model/delete"):"/model/delete",a=await fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({id:t})});if(!a.ok){let e=await a.text();throw l(e),console.error("Error response from the server:",e),Error("Network response was not ok")}let n=await a.json();return console.log("API Response:",n),n}catch(e){throw console.error("Failed to create key:",e),e}},m=async(e,t)=>{if(console.log("budget_id in budget delete call: ".concat(t)),null!=e)try{let o=r?"".concat(r,"/budget/delete"):"/budget/delete",a=await fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({id:t})});if(!a.ok){let e=await a.text();throw l(e),console.error("Error response from the server:",e),Error("Network response was not ok")}let n=await a.json();return console.log("API Response:",n),n}catch(e){throw console.error("Failed to create key:",e),e}},k=async(e,t)=>{try{console.log("Form Values in budgetCreateCall:",t),console.log("Form Values after check:",t);let 
o=r?"".concat(r,"/budget/new"):"/budget/new",a=await fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({...t})});if(!a.ok){let e=await a.text();throw l(e),console.error("Error response from the server:",e),Error("Network response was not ok")}let n=await a.json();return console.log("API Response:",n),n}catch(e){throw console.error("Failed to create key:",e),e}},_=async(e,t)=>{try{console.log("Form Values in budgetUpdateCall:",t),console.log("Form Values after check:",t);let o=r?"".concat(r,"/budget/update"):"/budget/update",a=await fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({...t})});if(!a.ok){let e=await a.text();throw l(e),console.error("Error response from the server:",e),Error("Network response was not ok")}let n=await a.json();return console.log("API Response:",n),n}catch(e){throw console.error("Failed to create key:",e),e}},T=async(e,t)=>{try{let o=r?"".concat(r,"/invitation/new"):"/invitation/new",a=await fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({user_id:t})});if(!a.ok){let e=await a.text();throw l(e),console.error("Error response from the server:",e),Error("Network response was not ok")}let n=await a.json();return console.log("API Response:",n),n}catch(e){throw console.error("Failed to create key:",e),e}},j=async e=>{try{let t=r?"".concat(r,"/alerting/settings"):"/alerting/settings",o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Network response was not ok")}return await o.json()}catch(e){throw console.error("Failed to get callbacks:",e),e}},E=async(e,t,o)=>{try{if(console.log("Form Values in keyCreateCall:",o),o.description&&(o.metadata||(o.metadata={}),o.metadata.description=o.description,delete o.description,o.metadata=JSON.stringify(o.metadata)),o.metadata){console.log("formValues.metadata:",o.metadata);try{o.metadata=JSON.parse(o.metadata)}catch(e){throw Error("Failed to parse metadata: "+e)}}console.log("Form Values after check:",o);let a=r?"".concat(r,"/key/generate"):"/key/generate",n=await fetch(a,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({user_id:t,...o})});if(!n.ok){let e=await n.text();throw l(e),console.error("Error response from the server:",e),Error(e)}let c=await n.json();return console.log("API Response:",c),c}catch(e){throw console.error("Failed to create key:",e),e}},C=async(e,t,o)=>{try{if(console.log("Form Values in keyCreateCall:",o),o.description&&(o.metadata||(o.metadata={}),o.metadata.description=o.description,delete o.description,o.metadata=JSON.stringify(o.metadata)),o.auto_create_key=!1,o.metadata){console.log("formValues.metadata:",o.metadata);try{o.metadata=JSON.parse(o.metadata)}catch(e){throw Error("Failed to parse metadata: "+e)}}console.log("Form Values after check:",o);let a=r?"".concat(r,"/user/new"):"/user/new",n=await fetch(a,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({user_id:t,...o})});if(!n.ok){let e=await n.text();throw l(e),console.error("Error response from the server:",e),Error(e)}let c=await n.json();return console.log("API Response:",c),c}catch(e){throw console.error("Failed to create key:",e),e}},S=async(e,t)=>{try{let o=r?"".concat(r,"/key/delete"):"/key/delete";console.log("in keyDeleteCall:",t);let a=await 
fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({keys:[t]})});if(!a.ok){let e=await a.text();throw l(e),Error("Network response was not ok")}let n=await a.json();return console.log(n),n}catch(e){throw console.error("Failed to create key:",e),e}},N=async(e,t)=>{try{let o=r?"".concat(r,"/user/delete"):"/user/delete";console.log("in userDeleteCall:",t);let a=await fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({user_ids:t})});if(!a.ok){let e=await a.text();throw l(e),Error("Network response was not ok")}let n=await a.json();return console.log(n),n}catch(e){throw console.error("Failed to delete user(s):",e),e}},v=async(e,t)=>{try{let o=r?"".concat(r,"/team/delete"):"/team/delete";console.log("in teamDeleteCall:",t);let a=await fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({team_ids:[t]})});if(!a.ok){let e=await a.text();throw l(e),Error("Network response was not ok")}let n=await a.json();return console.log(n),n}catch(e){throw console.error("Failed to delete key:",e),e}},F=async function(e){let t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:null,o=arguments.length>2&&void 0!==arguments[2]?arguments[2]:null,a=arguments.length>3&&void 0!==arguments[3]?arguments[3]:null,n=arguments.length>4&&void 0!==arguments[4]?arguments[4]:null,c=arguments.length>5&&void 0!==arguments[5]?arguments[5]:null,s=arguments.length>6&&void 0!==arguments[6]?arguments[6]:null,i=arguments.length>7&&void 0!==arguments[7]?arguments[7]:null,h=arguments.length>8&&void 0!==arguments[8]?arguments[8]:null,p=arguments.length>9&&void 0!==arguments[9]?arguments[9]:null;try{let w=r?"".concat(r,"/user/list"):"/user/list";console.log("in userListCall");let u=new URLSearchParams;if(t&&t.length>0){let e=t.join(",");u.append("user_ids",e)}o&&u.append("page",o.toString()),a&&u.append("page_size",a.toString()),n&&u.append("user_email",n),c&&u.append("role",c),s&&u.append("team",s),i&&u.append("sso_user_ids",i),h&&u.append("sort_by",h),p&&u.append("sort_order",p);let g=u.toString();g&&(w+="?".concat(g));let f=await fetch(w,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!f.ok){let e=await f.text();throw l(e),Error("Network response was not ok")}let y=await f.json();return console.log("/user/list API Response:",y),y}catch(e){throw console.error("Failed to create key:",e),e}},b=async function(e,t,o){let a=arguments.length>3&&void 0!==arguments[3]&&arguments[3],n=arguments.length>4?arguments[4]:void 0,c=arguments.length>5?arguments[5]:void 0,s=arguments.length>6&&void 0!==arguments[6]&&arguments[6];console.log("userInfoCall: ".concat(t,", ").concat(o,", ").concat(a,", ").concat(n,", ").concat(c,", ").concat(s));try{let i;if(a){i=r?"".concat(r,"/user/list"):"/user/list";let e=new URLSearchParams;null!=n&&e.append("page",n.toString()),null!=c&&e.append("page_size",c.toString()),i+="?".concat(e.toString())}else i=r?"".concat(r,"/user/info"):"/user/info",("Admin"!==o&&"Admin Viewer"!==o||s)&&t&&(i+="?user_id=".concat(t));console.log("Requesting user data from:",i);let h=await fetch(i,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!h.ok){let e=await h.text();throw l(e),Error("Network response was not ok")}let p=await h.json();return console.log("API Response:",p),p}catch(e){throw console.error("Failed to fetch user data:",e),e}},x=async(e,t)=>{try{let 
o=r?"".concat(r,"/team/info"):"/team/info";t&&(o="".concat(o,"?team_id=").concat(t)),console.log("in teamInfoCall");let a=await fetch(o,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!a.ok){let e=await a.text();throw l(e),Error("Network response was not ok")}let n=await a.json();return console.log("API Response:",n),n}catch(e){throw console.error("Failed to create key:",e),e}},B=async function(e,t){let o=arguments.length>2&&void 0!==arguments[2]?arguments[2]:null,a=arguments.length>3&&void 0!==arguments[3]?arguments[3]:null,n=arguments.length>4&&void 0!==arguments[4]?arguments[4]:null;arguments.length>5&&void 0!==arguments[5]&&arguments[5],arguments.length>6&&void 0!==arguments[6]&&arguments[6],arguments.length>7&&void 0!==arguments[7]&&arguments[7],arguments.length>8&&void 0!==arguments[8]&&arguments[8];try{let c=r?"".concat(r,"/v2/team/list"):"/v2/team/list";console.log("in teamInfoCall");let s=new URLSearchParams;o&&s.append("user_id",o.toString()),t&&s.append("organization_id",t.toString()),a&&s.append("team_id",a.toString()),n&&s.append("team_alias",n.toString());let i=s.toString();i&&(c+="?".concat(i));let h=await fetch(c,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!h.ok){let e=await h.text();throw l(e),Error("Network response was not ok")}let p=await h.json();return console.log("/v2/team/list API Response:",p),p}catch(e){throw console.error("Failed to create key:",e),e}},O=async function(e,t){let o=arguments.length>2&&void 0!==arguments[2]?arguments[2]:null,a=arguments.length>3&&void 0!==arguments[3]?arguments[3]:null,n=arguments.length>4&&void 0!==arguments[4]?arguments[4]:null;try{let c=r?"".concat(r,"/team/list"):"/team/list";console.log("in teamInfoCall");let s=new URLSearchParams;o&&s.append("user_id",o.toString()),t&&s.append("organization_id",t.toString()),a&&s.append("team_id",a.toString()),n&&s.append("team_alias",n.toString());let i=s.toString();i&&(c+="?".concat(i));let h=await fetch(c,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!h.ok){let e=await h.text();throw l(e),Error("Network response was not ok")}let p=await h.json();return console.log("/team/list API Response:",p),p}catch(e){throw console.error("Failed to create key:",e),e}},P=async e=>{try{let t=r?"".concat(r,"/team/available"):"/team/available";console.log("in availableTeamListCall");let o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Network response was not ok")}let a=await o.json();return console.log("/team/available_teams API Response:",a),a}catch(e){throw e}},G=async e=>{try{let t=r?"".concat(r,"/organization/list"):"/organization/list",o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Network response was not ok")}return await o.json()}catch(e){throw console.error("Failed to create key:",e),e}},J=async(e,t)=>{try{let o=r?"".concat(r,"/organization/info"):"/organization/info";t&&(o="".concat(o,"?organization_id=").concat(t)),console.log("in teamInfoCall");let a=await fetch(o,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!a.ok){let e=await a.text();throw l(e),Error("Network response was not ok")}let n=await a.json();return console.log("API Response:",n),n}catch(e){throw console.error("Failed to create 
key:",e),e}},A=async(e,t)=>{try{if(console.log("Form Values in organizationCreateCall:",t),t.metadata){console.log("formValues.metadata:",t.metadata);try{t.metadata=JSON.parse(t.metadata)}catch(e){throw console.error("Failed to parse metadata:",e),Error("Failed to parse metadata: "+e)}}let o=r?"".concat(r,"/organization/new"):"/organization/new",a=await fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({...t})});if(!a.ok){let e=await a.text();throw l(e),console.error("Error response from the server:",e),Error("Network response was not ok")}let n=await a.json();return console.log("API Response:",n),n}catch(e){throw console.error("Failed to create key:",e),e}},I=async(e,t)=>{try{console.log("Form Values in organizationUpdateCall:",t);let o=r?"".concat(r,"/organization/update"):"/organization/update",a=await fetch(o,{method:"PATCH",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({...t})});if(!a.ok){let e=await a.text();throw l(e),console.error("Error response from the server:",e),Error("Network response was not ok")}let n=await a.json();return console.log("Update Team Response:",n),n}catch(e){throw console.error("Failed to create key:",e),e}},R=async(e,t)=>{try{let o=r?"".concat(r,"/organization/delete"):"/organization/delete",a=await fetch(o,{method:"DELETE",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({organization_ids:[t]})});if(!a.ok){let e=await a.text();throw l(e),Error("Error deleting organization: ".concat(e))}return await a.json()}catch(e){throw console.error("Failed to delete organization:",e),e}},U=async(e,t)=>{try{let o=r?"".concat(r,"/utils/transform_request"):"/utils/transform_request",a=await fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify(t)});if(!a.ok){let e=await a.text();throw l(e),Error("Network response was not ok")}return await a.json()}catch(e){throw console.error("Failed to create key:",e),e}},z=async function(e,t,o){let a=arguments.length>3&&void 0!==arguments[3]?arguments[3]:1;try{let n=r?"".concat(r,"/user/daily/activity"):"/user/daily/activity",c=new URLSearchParams,s=e=>{let t=e.getFullYear(),o=String(e.getMonth()+1).padStart(2,"0"),a=String(e.getDate()).padStart(2,"0");return"".concat(t,"-").concat(o,"-").concat(a)};c.append("start_date",s(t)),c.append("end_date",s(o)),c.append("page_size","1000"),c.append("page",a.toString());let i=c.toString();i&&(n+="?".concat(i));let h=await fetch(n,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!h.ok){let e=await h.text();throw l(e),Error("Network response was not ok")}return await h.json()}catch(e){throw console.error("Failed to create key:",e),e}},V=async function(e,t,o){let a=arguments.length>3&&void 0!==arguments[3]?arguments[3]:1,n=arguments.length>4&&void 0!==arguments[4]?arguments[4]:null;try{let c=r?"".concat(r,"/tag/daily/activity"):"/tag/daily/activity",s=new URLSearchParams,i=e=>{let t=e.getFullYear(),o=String(e.getMonth()+1).padStart(2,"0"),a=String(e.getDate()).padStart(2,"0");return"".concat(t,"-").concat(o,"-").concat(a)};s.append("start_date",i(t)),s.append("end_date",i(o)),s.append("page_size","1000"),s.append("page",a.toString()),n&&s.append("tags",n.join(","));let h=s.toString();h&&(c+="?".concat(h));let p=await fetch(c,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!p.ok){let e=await p.text();throw l(e),Error("Network 
response was not ok")}return await p.json()}catch(e){throw console.error("Failed to create key:",e),e}},L=async function(e,t,o){let a=arguments.length>3&&void 0!==arguments[3]?arguments[3]:1,n=arguments.length>4&&void 0!==arguments[4]?arguments[4]:null;try{let c=r?"".concat(r,"/team/daily/activity"):"/team/daily/activity",s=new URLSearchParams,i=e=>{let t=e.getFullYear(),o=String(e.getMonth()+1).padStart(2,"0"),a=String(e.getDate()).padStart(2,"0");return"".concat(t,"-").concat(o,"-").concat(a)};s.append("start_date",i(t)),s.append("end_date",i(o)),s.append("page_size","1000"),s.append("page",a.toString()),n&&s.append("team_ids",n.join(",")),s.append("exclude_team_ids","litellm-dashboard");let h=s.toString();h&&(c+="?".concat(h));let p=await fetch(c,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!p.ok){let e=await p.text();throw l(e),Error("Network response was not ok")}return await p.json()}catch(e){throw console.error("Failed to create key:",e),e}},M=async e=>{try{let t=r?"".concat(r,"/onboarding/get_token"):"/onboarding/get_token";t+="?invite_link=".concat(e);let o=await fetch(t,{method:"GET",headers:{"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Network response was not ok")}return await o.json()}catch(e){throw console.error("Failed to create key:",e),e}},D=async(e,t,o,a)=>{let n=r?"".concat(r,"/onboarding/claim_token"):"/onboarding/claim_token";try{let r=await fetch(n,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({invitation_link:t,user_id:o,password:a})});if(!r.ok){let e=await r.text();throw l(e),Error("Network response was not ok")}let c=await r.json();return console.log(c),c}catch(e){throw console.error("Failed to delete key:",e),e}},Z=async(e,t,o)=>{try{let a=r?"".concat(r,"/key/").concat(t,"/regenerate"):"/key/".concat(t,"/regenerate"),n=await fetch(a,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify(o)});if(!n.ok){let e=await n.text();throw l(e),Error("Network response was not ok")}let c=await n.json();return console.log("Regenerate key Response:",c),c}catch(e){throw console.error("Failed to regenerate key:",e),e}},H=!1,q=null,Y=async(e,t,o)=>{try{console.log("modelInfoCall:",e,t,o);let n=r?"".concat(r,"/v2/model/info"):"/v2/model/info",c=new URLSearchParams;c.append("include_team_models","true"),c.toString()&&(n+="?".concat(c.toString()));let s=await fetch(n,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!s.ok){let e=await s.text();throw e+="error shown=".concat(H),H||(e.includes("No model list passed")&&(e="No Models Exist. 
Click Add Model to get started."),a.ZP.info(e,10),H=!0,q&&clearTimeout(q),q=setTimeout(()=>{H=!1},1e4)),Error("Network response was not ok")}let i=await s.json();return console.log("modelInfoCall:",i),i}catch(e){throw console.error("Failed to create key:",e),e}},X=async(e,t)=>{try{let o=r?"".concat(r,"/v1/model/info"):"/v1/model/info";o+="?litellm_model_id=".concat(t);let a=await fetch(o,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!a.ok)throw await a.text(),Error("Network response was not ok");let n=await a.json();return console.log("modelInfoV1Call:",n),n}catch(e){throw console.error("Failed to create key:",e),e}},$=async e=>{try{let t=r?"".concat(r,"/model_group/info"):"/model_group/info",o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok)throw await o.text(),Error("Network response was not ok");let a=await o.json();return console.log("modelHubCall:",a),a}catch(e){throw console.error("Failed to create key:",e),e}},K=async e=>{try{let t=r?"".concat(r,"/get/allowed_ips"):"/get/allowed_ips",o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw Error("Network response was not ok: ".concat(e))}let a=await o.json();return console.log("getAllowedIPs:",a),a.data}catch(e){throw console.error("Failed to get allowed IPs:",e),e}},W=async(e,t)=>{try{let o=r?"".concat(r,"/add/allowed_ip"):"/add/allowed_ip",a=await fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({ip:t})});if(!a.ok){let e=await a.text();throw Error("Network response was not ok: ".concat(e))}let n=await a.json();return console.log("addAllowedIP:",n),n}catch(e){throw console.error("Failed to add allowed IP:",e),e}},Q=async(e,t)=>{try{let o=r?"".concat(r,"/delete/allowed_ip"):"/delete/allowed_ip",a=await fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({ip:t})});if(!a.ok){let e=await a.text();throw Error("Network response was not ok: ".concat(e))}let n=await a.json();return console.log("deleteAllowedIP:",n),n}catch(e){throw console.error("Failed to delete allowed IP:",e),e}},ee=async(e,t,o,a,n,c,s,i)=>{try{let t=r?"".concat(r,"/model/metrics"):"/model/metrics";a&&(t="".concat(t,"?_selected_model_group=").concat(a,"&startTime=").concat(n,"&endTime=").concat(c,"&api_key=").concat(s,"&customer=").concat(i));let o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Network response was not ok")}return await o.json()}catch(e){throw console.error("Failed to create key:",e),e}},et=async(e,t,o,a)=>{try{let n=r?"".concat(r,"/model/streaming_metrics"):"/model/streaming_metrics";t&&(n="".concat(n,"?_selected_model_group=").concat(t,"&startTime=").concat(o,"&endTime=").concat(a));let c=await fetch(n,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!c.ok){let e=await c.text();throw l(e),Error("Network response was not ok")}return await c.json()}catch(e){throw console.error("Failed to create key:",e),e}},eo=async(e,t,o,a,n,c,s,i)=>{try{let t=r?"".concat(r,"/model/metrics/slow_responses"):"/model/metrics/slow_responses";a&&(t="".concat(t,"?_selected_model_group=").concat(a,"&startTime=").concat(n,"&endTime=").concat(c,"&api_key=").concat(s,"&customer=").concat(i));let o=await fetch(t,{method:"GET",headers:{[d]:"Bearer 
".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Network response was not ok")}return await o.json()}catch(e){throw console.error("Failed to create key:",e),e}},ea=async(e,t,o,a,n,c,s,i)=>{try{let t=r?"".concat(r,"/model/metrics/exceptions"):"/model/metrics/exceptions";a&&(t="".concat(t,"?_selected_model_group=").concat(a,"&startTime=").concat(n,"&endTime=").concat(c,"&api_key=").concat(s,"&customer=").concat(i));let o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Network response was not ok")}return await o.json()}catch(e){throw console.error("Failed to create key:",e),e}},er=async function(e,t,o){let a=arguments.length>3&&void 0!==arguments[3]&&arguments[3],n=arguments.length>4&&void 0!==arguments[4]?arguments[4]:null,c=(arguments.length>5&&void 0!==arguments[5]&&arguments[5],arguments.length>6&&void 0!==arguments[6]&&arguments[6]);console.log("in /models calls, globalLitellmHeaderName",d);try{let t=r?"".concat(r,"/models"):"/models",o=new URLSearchParams;o.append("include_model_access_groups","True"),!0===a&&o.append("return_wildcard_routes","True"),!0===c&&o.append("only_model_access_groups","True"),n&&o.append("team_id",n.toString()),o.toString()&&(t+="?".concat(o.toString()));let s=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!s.ok){let e=await s.text();throw l(e),Error("Network response was not ok")}return await s.json()}catch(e){throw console.error("Failed to create key:",e),e}},en=async e=>{try{let t=r?"".concat(r,"/global/spend/teams"):"/global/spend/teams";console.log("in teamSpendLogsCall:",t);let o=await fetch("".concat(t),{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Network response was not ok")}let a=await o.json();return console.log(a),a}catch(e){throw console.error("Failed to create key:",e),e}},ec=async(e,t,o,a)=>{try{let n=r?"".concat(r,"/global/spend/tags"):"/global/spend/tags";t&&o&&(n="".concat(n,"?start_date=").concat(t,"&end_date=").concat(o)),a&&(n+="".concat(n,"&tags=").concat(a.join(","))),console.log("in tagsSpendLogsCall:",n);let c=await fetch("".concat(n),{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!c.ok)throw await c.text(),Error("Network response was not ok");let s=await c.json();return console.log(s),s}catch(e){throw console.error("Failed to create key:",e),e}},es=async e=>{try{let t=r?"".concat(r,"/global/spend/all_tag_names"):"/global/spend/all_tag_names";console.log("in global/spend/all_tag_names call",t);let o=await fetch("".concat(t),{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok)throw await o.text(),Error("Network response was not ok");let a=await o.json();return console.log(a),a}catch(e){throw console.error("Failed to create key:",e),e}},ei=async e=>{try{let t=r?"".concat(r,"/global/all_end_users"):"/global/all_end_users";console.log("in global/all_end_users call",t);let o=await fetch("".concat(t),{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok)throw await o.text(),Error("Network response was not ok");let a=await o.json();return console.log(a),a}catch(e){throw console.error("Failed to create key:",e),e}},el=async(e,t)=>{try{let 
o=r?"".concat(r,"/user/filter/ui"):"/user/filter/ui";t.get("user_email")&&(o+="?user_email=".concat(t.get("user_email"))),t.get("user_id")&&(o+="?user_id=".concat(t.get("user_id")));let a=await fetch(o,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!a.ok){let e=await a.text();throw l(e),Error("Network response was not ok")}return await a.json()}catch(e){throw console.error("Failed to create key:",e),e}},ed=async(e,t,o,a,n,c,s,i,h,p,w)=>{try{let u=r?"".concat(r,"/spend/logs/ui"):"/spend/logs/ui",g=new URLSearchParams;t&&g.append("api_key",t),o&&g.append("team_id",o),a&&g.append("request_id",a),n&&g.append("start_date",n),c&&g.append("end_date",c),s&&g.append("page",s.toString()),i&&g.append("page_size",i.toString()),h&&g.append("user_id",h),p&&g.append("status_filter",p),w&&g.append("model",w);let f=g.toString();f&&(u+="?".concat(f));let y=await fetch(u,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!y.ok){let e=await y.text();throw l(e),Error("Network response was not ok")}let m=await y.json();return console.log("Spend Logs Response:",m),m}catch(e){throw console.error("Failed to fetch spend logs:",e),e}},eh=async e=>{try{let t=r?"".concat(r,"/global/spend/logs"):"/global/spend/logs",o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Network response was not ok")}let a=await o.json();return console.log(a),a}catch(e){throw console.error("Failed to create key:",e),e}},ep=async e=>{try{let t=r?"".concat(r,"/global/spend/keys?limit=5"):"/global/spend/keys?limit=5",o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Network response was not ok")}let a=await o.json();return console.log(a),a}catch(e){throw console.error("Failed to create key:",e),e}},ew=async(e,t,o,a)=>{try{let n=r?"".concat(r,"/global/spend/end_users"):"/global/spend/end_users",c="";c=t?JSON.stringify({api_key:t,startTime:o,endTime:a}):JSON.stringify({startTime:o,endTime:a});let s={method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:c},i=await fetch(n,s);if(!i.ok){let e=await i.text();throw l(e),Error("Network response was not ok")}let h=await i.json();return console.log(h),h}catch(e){throw console.error("Failed to create key:",e),e}},eu=async(e,t,o,a)=>{try{let n=r?"".concat(r,"/global/spend/provider"):"/global/spend/provider";o&&a&&(n+="?start_date=".concat(o,"&end_date=").concat(a)),t&&(n+="&api_key=".concat(t));let c={method:"GET",headers:{[d]:"Bearer ".concat(e)}},s=await fetch(n,c);if(!s.ok){let e=await s.text();throw l(e),Error("Network response was not ok")}let i=await s.json();return console.log(i),i}catch(e){throw console.error("Failed to fetch spend data:",e),e}},eg=async(e,t,o)=>{try{let a=r?"".concat(r,"/global/activity"):"/global/activity";t&&o&&(a+="?start_date=".concat(t,"&end_date=").concat(o));let n={method:"GET",headers:{[d]:"Bearer ".concat(e)}},c=await fetch(a,n);if(!c.ok)throw await c.text(),Error("Network response was not ok");let s=await c.json();return console.log(s),s}catch(e){throw console.error("Failed to fetch spend data:",e),e}},ef=async(e,t,o)=>{try{let a=r?"".concat(r,"/global/activity/cache_hits"):"/global/activity/cache_hits";t&&o&&(a+="?start_date=".concat(t,"&end_date=").concat(o));let n={method:"GET",headers:{[d]:"Bearer ".concat(e)}},c=await fetch(a,n);if(!c.ok)throw await 
c.text(),Error("Network response was not ok");let s=await c.json();return console.log(s),s}catch(e){throw console.error("Failed to fetch spend data:",e),e}},ey=async(e,t,o)=>{try{let a=r?"".concat(r,"/global/activity/model"):"/global/activity/model";t&&o&&(a+="?start_date=".concat(t,"&end_date=").concat(o));let n={method:"GET",headers:{[d]:"Bearer ".concat(e)}},c=await fetch(a,n);if(!c.ok)throw await c.text(),Error("Network response was not ok");let s=await c.json();return console.log(s),s}catch(e){throw console.error("Failed to fetch spend data:",e),e}},em=async(e,t,o,a)=>{try{let n=r?"".concat(r,"/global/activity/exceptions"):"/global/activity/exceptions";t&&o&&(n+="?start_date=".concat(t,"&end_date=").concat(o)),a&&(n+="&model_group=".concat(a));let c={method:"GET",headers:{[d]:"Bearer ".concat(e)}},s=await fetch(n,c);if(!s.ok)throw await s.text(),Error("Network response was not ok");let i=await s.json();return console.log(i),i}catch(e){throw console.error("Failed to fetch spend data:",e),e}},ek=async(e,t,o,a)=>{try{let n=r?"".concat(r,"/global/activity/exceptions/deployment"):"/global/activity/exceptions/deployment";t&&o&&(n+="?start_date=".concat(t,"&end_date=").concat(o)),a&&(n+="&model_group=".concat(a));let c={method:"GET",headers:{[d]:"Bearer ".concat(e)}},s=await fetch(n,c);if(!s.ok)throw await s.text(),Error("Network response was not ok");let i=await s.json();return console.log(i),i}catch(e){throw console.error("Failed to fetch spend data:",e),e}},e_=async e=>{try{let t=r?"".concat(r,"/global/spend/models?limit=5"):"/global/spend/models?limit=5",o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Network response was not ok")}let a=await o.json();return console.log(a),a}catch(e){throw console.error("Failed to create key:",e),e}},eT=async(e,t)=>{try{let o=r?"".concat(r,"/v2/key/info"):"/v2/key/info",a=await fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({keys:t})});if(!a.ok){let e=await a.text();if(e.includes("Invalid proxy server token passed"))throw Error("Invalid proxy server token passed");throw l(e),Error("Network response was not ok")}let n=await a.json();return console.log(n),n}catch(e){throw console.error("Failed to create key:",e),e}},ej=async(e,t,o)=>{try{console.log("Sending model connection test request:",JSON.stringify(t));let n=r?"".concat(r,"/health/test_connection"):"/health/test_connection",c=await fetch(n,{method:"POST",headers:{"Content-Type":"application/json",[d]:"Bearer ".concat(e)},body:JSON.stringify({litellm_params:t,mode:o})}),s=c.headers.get("content-type");if(!s||!s.includes("application/json")){let e=await c.text();throw console.error("Received non-JSON response:",e),Error("Received non-JSON response (".concat(c.status,": ").concat(c.statusText,"). 
Check network tab for details."))}let i=await c.json();if(!c.ok||"error"===i.status){if("error"===i.status);else{var a;return{status:"error",message:(null===(a=i.error)||void 0===a?void 0:a.message)||"Connection test failed: ".concat(c.status," ").concat(c.statusText)}}}return i}catch(e){throw console.error("Model connection test error:",e),e}},eE=async(e,t)=>{try{console.log("entering keyInfoV1Call");let o=r?"".concat(r,"/key/info"):"/key/info";o="".concat(o,"?key=").concat(t);let n=await fetch(o,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(console.log("response",n),!n.ok){let e=await n.text();l(e),a.ZP.error("Failed to fetch key info - "+e)}let c=await n.json();return console.log("data",c),c}catch(e){throw console.error("Failed to fetch key info:",e),e}},eC=async function(e,t,o,a,n,c,s,i){let h=arguments.length>8&&void 0!==arguments[8]?arguments[8]:null,p=arguments.length>9&&void 0!==arguments[9]?arguments[9]:null;try{let w=r?"".concat(r,"/key/list"):"/key/list";console.log("in keyListCall");let u=new URLSearchParams;o&&u.append("team_id",o.toString()),t&&u.append("organization_id",t.toString()),a&&u.append("key_alias",a),c&&u.append("key_hash",c),n&&u.append("user_id",n.toString()),s&&u.append("page",s.toString()),i&&u.append("size",i.toString()),h&&u.append("sort_by",h),p&&u.append("sort_order",p),u.append("return_full_object","true"),u.append("include_team_keys","true");let g=u.toString();g&&(w+="?".concat(g));let f=await fetch(w,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!f.ok){let e=await f.text();throw l(e),Error("Network response was not ok")}let y=await f.json();return console.log("/team/list API Response:",y),y}catch(e){throw console.error("Failed to create key:",e),e}},eS=async(e,t)=>{try{let o=r?"".concat(r,"/user/get_users?role=").concat(t):"/user/get_users?role=".concat(t);console.log("in userGetAllUsersCall:",o);let a=await fetch(o,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!a.ok){let e=await a.text();throw l(e),Error("Network response was not ok")}let n=await a.json();return console.log(n),n}catch(e){throw console.error("Failed to get requested models:",e),e}},eN=async e=>{try{let t=r?"".concat(r,"/user/available_roles"):"/user/available_roles",o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok)throw await o.text(),Error("Network response was not ok");let a=await o.json();return console.log("response from user/available_role",a),a}catch(e){throw e}},ev=async(e,t)=>{try{if(console.log("Form Values in teamCreateCall:",t),t.metadata){console.log("formValues.metadata:",t.metadata);try{t.metadata=JSON.parse(t.metadata)}catch(e){throw Error("Failed to parse metadata: "+e)}}let o=r?"".concat(r,"/team/new"):"/team/new",a=await fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({...t})});if(!a.ok){let e=await a.text();throw l(e),console.error("Error response from the server:",e),Error("Network response was not ok")}let n=await a.json();return console.log("API Response:",n),n}catch(e){throw console.error("Failed to create key:",e),e}},eF=async(e,t)=>{try{if(console.log("Form Values in credentialCreateCall:",t),t.metadata){console.log("formValues.metadata:",t.metadata);try{t.metadata=JSON.parse(t.metadata)}catch(e){throw Error("Failed to parse metadata: "+e)}}let o=r?"".concat(r,"/credentials"):"/credentials",a=await 
fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({...t})});if(!a.ok){let e=await a.text();throw l(e),console.error("Error response from the server:",e),Error("Network response was not ok")}let n=await a.json();return console.log("API Response:",n),n}catch(e){throw console.error("Failed to create key:",e),e}},eb=async e=>{try{let t=r?"".concat(r,"/credentials"):"/credentials";console.log("in credentialListCall");let o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Network response was not ok")}let a=await o.json();return console.log("/credentials API Response:",a),a}catch(e){throw console.error("Failed to create key:",e),e}},ex=async(e,t,o)=>{try{let a=r?"".concat(r,"/credentials"):"/credentials";t?a+="/by_name/".concat(t):o&&(a+="/by_model/".concat(o)),console.log("in credentialListCall");let n=await fetch(a,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!n.ok){let e=await n.text();throw l(e),Error("Network response was not ok")}let c=await n.json();return console.log("/credentials API Response:",c),c}catch(e){throw console.error("Failed to create key:",e),e}},eB=async(e,t)=>{try{let o=r?"".concat(r,"/credentials/").concat(t):"/credentials/".concat(t);console.log("in credentialDeleteCall:",t);let a=await fetch(o,{method:"DELETE",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!a.ok){let e=await a.text();throw l(e),Error("Network response was not ok")}let n=await a.json();return console.log(n),n}catch(e){throw console.error("Failed to delete key:",e),e}},eO=async(e,t,o)=>{try{if(console.log("Form Values in credentialUpdateCall:",o),o.metadata){console.log("formValues.metadata:",o.metadata);try{o.metadata=JSON.parse(o.metadata)}catch(e){throw Error("Failed to parse metadata: "+e)}}let a=r?"".concat(r,"/credentials/").concat(t):"/credentials/".concat(t),n=await fetch(a,{method:"PATCH",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({...o})});if(!n.ok){let e=await n.text();throw l(e),console.error("Error response from the server:",e),Error("Network response was not ok")}let c=await n.json();return console.log("API Response:",c),c}catch(e){throw console.error("Failed to create key:",e),e}},eP=async(e,t)=>{try{if(console.log("Form Values in keyUpdateCall:",t),t.model_tpm_limit){console.log("formValues.model_tpm_limit:",t.model_tpm_limit);try{t.model_tpm_limit=JSON.parse(t.model_tpm_limit)}catch(e){throw Error("Failed to parse model_tpm_limit: "+e)}}if(t.model_rpm_limit){console.log("formValues.model_rpm_limit:",t.model_rpm_limit);try{t.model_rpm_limit=JSON.parse(t.model_rpm_limit)}catch(e){throw Error("Failed to parse model_rpm_limit: "+e)}}let o=r?"".concat(r,"/key/update"):"/key/update",a=await fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({...t})});if(!a.ok){let e=await a.text();throw l(e),console.error("Error response from the server:",e),Error("Network response was not ok")}let n=await a.json();return console.log("Update key Response:",n),n}catch(e){throw console.error("Failed to create key:",e),e}},eG=async(e,t)=>{try{console.log("Form Values in teamUpateCall:",t);let o=r?"".concat(r,"/team/update"):"/team/update",n=await fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({...t})});if(!n.ok){let e=await 
n.text();throw l(e),console.error("Error response from the server:",e),a.ZP.error("Failed to update team settings: "+e),Error(e)}let c=await n.json();return console.log("Update Team Response:",c),c}catch(e){throw console.error("Failed to update team:",e),e}},eJ=async(e,t,o)=>{try{console.log("Form Values in modelUpateCall:",t);let a=r?"".concat(r,"/model/").concat(o,"/update"):"/model/".concat(o,"/update"),n=await fetch(a,{method:"PATCH",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({...t})});if(!n.ok){let e=await n.text();throw l(e),console.error("Error update from the server:",e),Error("Network response was not ok")}let c=await n.json();return console.log("Update model Response:",c),c}catch(e){throw console.error("Failed to update model:",e),e}},eA=async(e,t,o)=>{try{console.log("Form Values in teamMemberAddCall:",o);let n=r?"".concat(r,"/team/member_add"):"/team/member_add",c=await fetch(n,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({team_id:t,member:o})});if(!c.ok){var a;let e=await c.text(),t={};try{t=JSON.parse(e)}catch(t){console.warn("Failed to parse error body as JSON:",e)}let o=(null==t?void 0:null===(a=t.detail)||void 0===a?void 0:a.error)||"Failed to add team member",r=Error(o);throw r.raw=t,r}let s=await c.json();return console.log("API Response:",s),s}catch(e){throw console.error("Failed to create key:",e),e}},eI=async(e,t,o)=>{try{console.log("Form Values in teamMemberUpdateCall:",o);let n=r?"".concat(r,"/team/member_update"):"/team/member_update",c=await fetch(n,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({team_id:t,role:o.role,user_id:o.user_id})});if(!c.ok){var a;let e=await c.text(),t={};try{t=JSON.parse(e)}catch(t){console.warn("Failed to parse error body as JSON:",e)}let o=(null==t?void 0:null===(a=t.detail)||void 0===a?void 0:a.error)||"Failed to add team member",r=Error(o);throw r.raw=t,r}let s=await c.json();return console.log("API Response:",s),s}catch(e){throw console.error("Failed to update team member:",e),e}},eR=async(e,t,o)=>{try{console.log("Form Values in teamMemberAddCall:",o);let a=r?"".concat(r,"/team/member_delete"):"/team/member_delete",n=await fetch(a,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({team_id:t,...void 0!==o.user_email&&{user_email:o.user_email},...void 0!==o.user_id&&{user_id:o.user_id}})});if(!n.ok){let e=await n.text();throw l(e),console.error("Error response from the server:",e),Error("Network response was not ok")}let c=await n.json();return console.log("API Response:",c),c}catch(e){throw console.error("Failed to create key:",e),e}},eU=async(e,t,o)=>{try{console.log("Form Values in teamMemberAddCall:",o);let a=r?"".concat(r,"/organization/member_add"):"/organization/member_add",n=await fetch(a,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({organization_id:t,member:o})});if(!n.ok){let e=await n.text();throw l(e),console.error("Error response from the server:",e),Error(e)}let c=await n.json();return console.log("API Response:",c),c}catch(e){throw console.error("Failed to create organization member:",e),e}},ez=async(e,t,o)=>{try{console.log("Form Values in organizationMemberDeleteCall:",o);let a=r?"".concat(r,"/organization/member_delete"):"/organization/member_delete",n=await fetch(a,{method:"DELETE",headers:{[d]:"Bearer 
".concat(e),"Content-Type":"application/json"},body:JSON.stringify({organization_id:t,user_id:o})});if(!n.ok){let e=await n.text();throw l(e),console.error("Error response from the server:",e),Error("Network response was not ok")}let c=await n.json();return console.log("API Response:",c),c}catch(e){throw console.error("Failed to delete organization member:",e),e}},eV=async(e,t,o)=>{try{console.log("Form Values in organizationMemberUpdateCall:",o);let a=r?"".concat(r,"/organization/member_update"):"/organization/member_update",n=await fetch(a,{method:"PATCH",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({organization_id:t,...o})});if(!n.ok){let e=await n.text();throw l(e),console.error("Error response from the server:",e),Error("Network response was not ok")}let c=await n.json();return console.log("API Response:",c),c}catch(e){throw console.error("Failed to update organization member:",e),e}},eL=async(e,t,o)=>{try{console.log("Form Values in userUpdateUserCall:",t);let a=r?"".concat(r,"/user/update"):"/user/update",n={...t};null!==o&&(n.user_role=o),n=JSON.stringify(n);let c=await fetch(a,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:n});if(!c.ok){let e=await c.text();throw l(e),console.error("Error response from the server:",e),Error("Network response was not ok")}let s=await c.json();return console.log("API Response:",s),s}catch(e){throw console.error("Failed to create key:",e),e}},eM=async(e,t)=>{try{let o=r?"".concat(r,"/health/services?service=").concat(t):"/health/services?service=".concat(t);console.log("Checking Slack Budget Alerts service health");let n=await fetch(o,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!n.ok){let e=await n.text();throw l(e),Error(e)}let c=await n.json();return a.ZP.success("Test request to ".concat(t," made - check logs/alerts on ").concat(t," to verify")),c}catch(e){throw console.error("Failed to perform health check:",e),e}},eD=async e=>{try{let t=r?"".concat(r,"/budget/list"):"/budget/list",o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Network response was not ok")}return await o.json()}catch(e){throw console.error("Failed to get callbacks:",e),e}},eZ=async(e,t,o)=>{try{let t=r?"".concat(r,"/get/config/callbacks"):"/get/config/callbacks",o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Network response was not ok")}return await o.json()}catch(e){throw console.error("Failed to get callbacks:",e),e}},eH=async e=>{try{let t=r?"".concat(r,"/config/list?config_type=general_settings"):"/config/list?config_type=general_settings",o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Network response was not ok")}return await o.json()}catch(e){throw console.error("Failed to get callbacks:",e),e}},eq=async e=>{try{let t=r?"".concat(r,"/config/pass_through_endpoint"):"/config/pass_through_endpoint",o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Network response was not ok")}return await o.json()}catch(e){throw console.error("Failed to get callbacks:",e),e}},eY=async(e,t)=>{try{let 
o=r?"".concat(r,"/config/field/info?field_name=").concat(t):"/config/field/info?field_name=".concat(t),a=await fetch(o,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!a.ok)throw await a.text(),Error("Network response was not ok");return await a.json()}catch(e){throw console.error("Failed to set callbacks:",e),e}},eX=async(e,t)=>{try{let o=r?"".concat(r,"/config/pass_through_endpoint"):"/config/pass_through_endpoint",a=await fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({...t})});if(!a.ok){let e=await a.text();throw l(e),Error("Network response was not ok")}return await a.json()}catch(e){throw console.error("Failed to set callbacks:",e),e}},e$=async(e,t,o)=>{try{let n=r?"".concat(r,"/config/field/update"):"/config/field/update",c=await fetch(n,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({field_name:t,field_value:o,config_type:"general_settings"})});if(!c.ok){let e=await c.text();throw l(e),Error("Network response was not ok")}let s=await c.json();return a.ZP.success("Successfully updated value!"),s}catch(e){throw console.error("Failed to set callbacks:",e),e}},eK=async(e,t)=>{try{let o=r?"".concat(r,"/config/field/delete"):"/config/field/delete",n=await fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({field_name:t,config_type:"general_settings"})});if(!n.ok){let e=await n.text();throw l(e),Error("Network response was not ok")}let c=await n.json();return a.ZP.success("Field reset on proxy"),c}catch(e){throw console.error("Failed to get callbacks:",e),e}},eW=async(e,t)=>{try{let o=r?"".concat(r,"/config/pass_through_endpoint?endpoint_id=").concat(t):"/config/pass_through_endpoint".concat(t),a=await fetch(o,{method:"DELETE",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!a.ok){let e=await a.text();throw l(e),Error("Network response was not ok")}return await a.json()}catch(e){throw console.error("Failed to get callbacks:",e),e}},eQ=async(e,t)=>{try{let o=r?"".concat(r,"/config/update"):"/config/update",a=await fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({...t})});if(!a.ok){let e=await a.text();throw l(e),Error("Network response was not ok")}return await a.json()}catch(e){throw console.error("Failed to set callbacks:",e),e}},e0=async e=>{try{let t=r?"".concat(r,"/health"):"/health",o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Network response was not ok")}return await o.json()}catch(e){throw console.error("Failed to call /health:",e),e}},e1=async(e,t)=>{try{let o=r?"".concat(r,"/health?model=").concat(encodeURIComponent(t)):"/health?model=".concat(encodeURIComponent(t)),a=await fetch(o,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!a.ok){let e=await a.text();throw Error(e||"Network response was not ok")}return await a.json()}catch(e){throw console.error("Failed to call /health for model ".concat(t,":"),e),e}},e2=async e=>{try{let t=r?"".concat(r,"/cache/ping"):"/cache/ping",o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error(e)}return await o.json()}catch(e){throw console.error("Failed to call /cache/ping:",e),e}},e3=async e=>{try{let 
t=r?"".concat(r,"/health/latest"):"/health/latest",o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error(e)}return await o.json()}catch(e){throw console.error("Failed to call /health/latest:",e),e}},e4=async e=>{try{console.log("Getting proxy UI settings"),console.log("proxyBaseUrl in getProxyUISettings:",r);let t=r?"".concat(r,"/sso/get/ui_settings"):"/sso/get/ui_settings",o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok)throw await o.text(),Error("Network response was not ok");return await o.json()}catch(e){throw console.error("Failed to get callbacks:",e),e}},e5=async e=>{try{let t=r?"".concat(r,"/v2/guardrails/list"):"/v2/guardrails/list",o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Network response was not ok")}return await o.json()}catch(e){throw console.error("Failed to get guardrails list:",e),e}},e6=async(e,t)=>{try{let o=r?"".concat(r,"/guardrails"):"/guardrails",a=await fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({guardrail:t})});if(!a.ok){let e=await a.text();throw l(e),Error(e)}let n=await a.json();return console.log("Create guardrail response:",n),n}catch(e){throw console.error("Failed to create guardrail:",e),e}},e8=async(e,t,o)=>{try{let a=r?"".concat(r,"/spend/logs/ui/").concat(t,"?start_date=").concat(encodeURIComponent(o)):"/spend/logs/ui/".concat(t,"?start_date=").concat(encodeURIComponent(o));console.log("Fetching log details from:",a);let n=await fetch(a,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!n.ok){let e=await n.text();throw l(e),Error("Network response was not ok")}let c=await n.json();return console.log("Fetched log details:",c),c}catch(e){throw console.error("Failed to fetch log details:",e),e}},e9=async e=>{try{let t=r?"".concat(r,"/get/internal_user_settings"):"/get/internal_user_settings";console.log("Fetching SSO settings from:",t);let o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Network response was not ok")}let a=await o.json();return console.log("Fetched SSO settings:",a),a}catch(e){throw console.error("Failed to fetch SSO settings:",e),e}},e7=async(e,t)=>{try{let o=r?"".concat(r,"/update/internal_user_settings"):"/update/internal_user_settings";console.log("Updating internal user settings:",t);let n=await fetch(o,{method:"PATCH",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify(t)});if(!n.ok){let e=await n.text();throw l(e),Error("Network response was not ok")}let c=await n.json();return console.log("Updated internal user settings:",c),a.ZP.success("Internal user settings updated successfully"),c}catch(e){throw console.error("Failed to update internal user settings:",e),e}},te=async e=>{try{let t=r?"".concat(r,"/v1/mcp/server"):"/v1/mcp/server";console.log("Fetching MCP servers from:",t);let o=await fetch(t,{method:s.GET,headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Network response was not ok")}let a=await o.json();return console.log("Fetched MCP servers:",a),a}catch(e){throw console.error("Failed to fetch MCP servers:",e),e}},tt=async(e,t)=>{try{console.log("Form Values in 
createMCPServer:",t);let o=r?"".concat(r,"/v1/mcp/server"):"/v1/mcp/server",a=await fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({...t})});if(!a.ok){let e=await a.text();throw l(e),console.error("Error response from the server:",e),Error("Network response was not ok")}let n=await a.json();return console.log("API Response:",n),n}catch(e){throw console.error("Failed to create key:",e),e}},to=async(e,t)=>{try{let o=r?"".concat(r,"/v1/mcp/server"):"/v1/mcp/server",a=await fetch(o,{method:"PUT",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify(t)});if(!a.ok){let e=await a.text();throw l(e),Error("Network response was not ok")}return await a.json()}catch(e){throw console.error("Failed to update MCP server:",e),e}},ta=async(e,t)=>{try{let o=(r?"".concat(r):"")+"/v1/mcp/server/".concat(t);console.log("in deleteMCPServer:",t);let a=await fetch(o,{method:s.DELETE,headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!a.ok){let e=await a.text();throw l(e),Error("Network response was not ok")}}catch(e){throw console.error("Failed to delete key:",e),e}},tr=async(e,t)=>{try{let o=r?"".concat(r,"/mcp-rest/tools/list?server_id=").concat(t):"/mcp-rest/tools/list?server_id=".concat(t);console.log("Fetching MCP tools from:",o);let a=await fetch(o,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!a.ok){let e=await a.text();throw l(e),Error("Network response was not ok")}let n=await a.json();return console.log("Fetched MCP tools:",n),n}catch(e){throw console.error("Failed to fetch MCP tools:",e),e}},tn=async(e,t,o,a)=>{try{let n=r?"".concat(r,"/mcp-rest/tools/call"):"/mcp-rest/tools/call";console.log("Calling MCP tool:",t,"with arguments:",o);let c=await fetch(n,{method:"POST",headers:{[d]:"Bearer ".concat(e),"x-mcp-auth":a,"Content-Type":"application/json"},body:JSON.stringify({name:t,arguments:o})});if(!c.ok){let e=await c.text();throw l(e),Error("Network response was not ok")}let s=await c.json();return console.log("MCP tool call response:",s),s}catch(e){throw console.error("Failed to call MCP tool:",e),e}},tc=async(e,t)=>{try{let o=r?"".concat(r,"/tag/new"):"/tag/new",a=await fetch(o,{method:"POST",headers:{"Content-Type":"application/json",Authorization:"Bearer ".concat(e)},body:JSON.stringify(t)});if(!a.ok){let e=await a.text();await l(e);return}return await a.json()}catch(e){throw console.error("Error creating tag:",e),e}},ts=async(e,t)=>{try{let o=r?"".concat(r,"/tag/update"):"/tag/update",a=await fetch(o,{method:"POST",headers:{"Content-Type":"application/json",Authorization:"Bearer ".concat(e)},body:JSON.stringify(t)});if(!a.ok){let e=await a.text();await l(e);return}return await a.json()}catch(e){throw console.error("Error updating tag:",e),e}},ti=async(e,t)=>{try{let o=r?"".concat(r,"/tag/info"):"/tag/info",a=await fetch(o,{method:"POST",headers:{"Content-Type":"application/json",Authorization:"Bearer ".concat(e)},body:JSON.stringify({names:t})});if(!a.ok){let e=await a.text();return await l(e),{}}return await a.json()}catch(e){throw console.error("Error getting tag info:",e),e}},tl=async e=>{try{let t=r?"".concat(r,"/tag/list"):"/tag/list",o=await fetch(t,{method:"GET",headers:{Authorization:"Bearer ".concat(e)}});if(!o.ok){let e=await o.text();return await l(e),{}}return await o.json()}catch(e){throw console.error("Error listing tags:",e),e}},td=async(e,t)=>{try{let o=r?"".concat(r,"/tag/delete"):"/tag/delete",a=await 
fetch(o,{method:"POST",headers:{"Content-Type":"application/json",Authorization:"Bearer ".concat(e)},body:JSON.stringify({name:t})});if(!a.ok){let e=await a.text();await l(e);return}return await a.json()}catch(e){throw console.error("Error deleting tag:",e),e}},th=async e=>{try{let t=r?"".concat(r,"/get/default_team_settings"):"/get/default_team_settings";console.log("Fetching default team settings from:",t);let o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Network response was not ok")}let a=await o.json();return console.log("Fetched default team settings:",a),a}catch(e){throw console.error("Failed to fetch default team settings:",e),e}},tp=async(e,t)=>{try{let o=r?"".concat(r,"/update/default_team_settings"):"/update/default_team_settings";console.log("Updating default team settings:",t);let n=await fetch(o,{method:"PATCH",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify(t)});if(!n.ok){let e=await n.text();throw l(e),Error("Network response was not ok")}let c=await n.json();return console.log("Updated default team settings:",c),a.ZP.success("Default team settings updated successfully"),c}catch(e){throw console.error("Failed to update default team settings:",e),e}},tw=async(e,t)=>{try{let o=r?"".concat(r,"/team/permissions_list?team_id=").concat(t):"/team/permissions_list?team_id=".concat(t),a=await fetch(o,{method:"GET",headers:{"Content-Type":"application/json",Authorization:"Bearer ".concat(e)}});if(!a.ok){let e=await a.text();throw l(e),Error("Network response was not ok")}let n=await a.json();return console.log("Team permissions response:",n),n}catch(e){throw console.error("Failed to get team permissions:",e),e}},tu=async(e,t,o)=>{try{let a=r?"".concat(r,"/team/permissions_update"):"/team/permissions_update",n=await fetch(a,{method:"POST",headers:{"Content-Type":"application/json",Authorization:"Bearer ".concat(e)},body:JSON.stringify({team_id:t,team_member_permissions:o})});if(!n.ok){let e=await n.text();throw l(e),Error("Network response was not ok")}let c=await n.json();return console.log("Team permissions response:",c),c}catch(e){throw console.error("Failed to update team permissions:",e),e}},tg=async(e,t)=>{try{let o=r?"".concat(r,"/spend/logs/session/ui?session_id=").concat(encodeURIComponent(t)):"/spend/logs/session/ui?session_id=".concat(encodeURIComponent(t)),a=await fetch(o,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!a.ok){let e=await a.text();throw l(e),Error("Network response was not ok")}return await a.json()}catch(e){throw console.error("Failed to fetch session logs:",e),e}},tf=async(e,t)=>{try{let o=r?"".concat(r,"/vector_store/new"):"/vector_store/new",a=await fetch(o,{method:"POST",headers:{"Content-Type":"application/json",Authorization:"Bearer ".concat(e)},body:JSON.stringify(t)});if(!a.ok){let e=await a.json();throw Error(e.detail||"Failed to create vector store")}return await a.json()}catch(e){throw console.error("Error creating vector store:",e),e}},ty=async function(e){arguments.length>1&&void 0!==arguments[1]&&arguments[1],arguments.length>2&&void 0!==arguments[2]&&arguments[2];try{let t=r?"".concat(r,"/vector_store/list"):"/vector_store/list",o=await fetch(t,{method:"GET",headers:{"Content-Type":"application/json",Authorization:"Bearer ".concat(e)}});if(!o.ok){let e=await o.json();throw Error(e.detail||"Failed to list vector stores")}return await o.json()}catch(e){throw 
console.error("Error listing vector stores:",e),e}},tm=async(e,t)=>{try{let o=r?"".concat(r,"/vector_store/delete"):"/vector_store/delete",a=await fetch(o,{method:"POST",headers:{"Content-Type":"application/json",Authorization:"Bearer ".concat(e)},body:JSON.stringify({vector_store_id:t})});if(!a.ok){let e=await a.json();throw Error(e.detail||"Failed to delete vector store")}return await a.json()}catch(e){throw console.error("Error deleting vector store:",e),e}},tk=async e=>{try{let t=r?"".concat(r,"/email/event_settings"):"/email/event_settings",o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Failed to get email event settings")}let a=await o.json();return console.log("Email event settings response:",a),a}catch(e){throw console.error("Failed to get email event settings:",e),e}},t_=async(e,t)=>{try{let o=r?"".concat(r,"/email/event_settings"):"/email/event_settings",a=await fetch(o,{method:"PATCH",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify(t)});if(!a.ok){let e=await a.text();throw l(e),Error("Failed to update email event settings")}let n=await a.json();return console.log("Update email event settings response:",n),n}catch(e){throw console.error("Failed to update email event settings:",e),e}},tT=async e=>{try{let t=r?"".concat(r,"/email/event_settings/reset"):"/email/event_settings/reset",o=await fetch(t,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Failed to reset email event settings")}let a=await o.json();return console.log("Reset email event settings response:",a),a}catch(e){throw console.error("Failed to reset email event settings:",e),e}},tj=async(e,t)=>{try{let o=r?"".concat(r,"/guardrails/").concat(t):"/guardrails/".concat(t),a=await fetch(o,{method:"DELETE",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!a.ok){let e=await a.text();throw l(e),Error(e)}let n=await a.json();return console.log("Delete guardrail response:",n),n}catch(e){throw console.error("Failed to delete guardrail:",e),e}},tE=async e=>{try{let t=r?"".concat(r,"/guardrails/ui/add_guardrail_settings"):"/guardrails/ui/add_guardrail_settings",o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Failed to get guardrail UI settings")}let a=await o.json();return console.log("Guardrail UI settings response:",a),a}catch(e){throw console.error("Failed to get guardrail UI settings:",e),e}},tC=async e=>{try{let t=r?"".concat(r,"/guardrails/ui/provider_specific_params"):"/guardrails/ui/provider_specific_params",o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Failed to get guardrail provider specific parameters")}let a=await o.json();return console.log("Guardrail provider specific params response:",a),a}catch(e){throw console.error("Failed to get guardrail provider specific parameters:",e),e}},tS=async(e,t)=>{try{let o=r?"".concat(r,"/guardrails/").concat(t,"/info"):"/guardrails/".concat(t,"/info"),a=await fetch(o,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!a.ok){let e=await a.text();throw l(e),Error("Failed to get guardrail info")}let n=await a.json();return console.log("Guardrail info response:",n),n}catch(e){throw console.error("Failed to 
get guardrail info:",e),e}},tN=async(e,t,o)=>{try{let a=r?"".concat(r,"/guardrails/").concat(t):"/guardrails/".concat(t),n=await fetch(a,{method:"PATCH",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify(o)});if(!n.ok){let e=await n.text();throw l(e),Error("Failed to update guardrail")}let c=await n.json();return console.log("Update guardrail response:",c),c}catch(e){throw console.error("Failed to update guardrail:",e),e}},tv=async e=>{try{let t=r?"".concat(r,"/get/sso_settings"):"/get/sso_settings";console.log("Fetching SSO configuration from:",t);let o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Network response was not ok")}let a=await o.json();return console.log("Fetched SSO configuration:",a),a}catch(e){throw console.error("Failed to fetch SSO configuration:",e),e}},tF=async(e,t)=>{try{let o=r?"".concat(r,"/update/sso_settings"):"/update/sso_settings";console.log("Updating SSO configuration:",t);let a=await fetch(o,{method:"PATCH",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify(t)});if(!a.ok){let e=await a.text();throw l(e),Error("Network response was not ok")}let n=await a.json();return console.log("Updated SSO configuration:",n),n}catch(e){throw console.error("Failed to update SSO configuration:",e),e}},tb=async(e,t,o,a,n)=>{try{let t=r?"".concat(r,"/audit"):"/audit",o=new URLSearchParams;a&&o.append("page",a.toString()),n&&o.append("page_size",n.toString());let c=o.toString();c&&(t+="?".concat(c));let s=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!s.ok){let e=await s.text();throw l(e),Error("Network response was not ok")}return await s.json()}catch(e){throw console.error("Failed to fetch audit logs:",e),e}},tx=async e=>{try{let t=r?"".concat(r,"/user/available_users"):"/user/available_users",o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e)}});if(!o.ok){if(404===o.status)return null;let e=await o.text();throw l(e),Error("Network response was not ok")}return await o.json()}catch(e){throw console.error("Failed to fetch remaining users:",e),e}},tB=async(e,t,o)=>{try{let n=r?"".concat(r,"/config/pass_through_endpoint/").concat(encodeURIComponent(t)):"/config/pass_through_endpoint/".concat(encodeURIComponent(t)),c=await fetch(n,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify(o)});if(!c.ok){let e=await c.text();throw l(e),Error("Network response was not ok")}let s=await c.json();return a.ZP.success("Pass through endpoint updated successfully"),s}catch(e){throw console.error("Failed to update pass through endpoint:",e),e}},tO=async(e,t)=>{try{let o=r?"".concat(r,"/config/callback/delete"):"/config/callback/delete",a=await fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({callback_name:t})});if(!a.ok){let e=await a.text();throw l(e),Error("Network response was not ok")}return await a.json()}catch(e){throw console.error("Failed to delete specific callback:",e),e}}}}]);
\ No newline at end of file
+"use strict";(self.webpackChunk_N_E=self.webpackChunk_N_E||[]).push([[250],{19250:function(e,t,o){o.d(t,{$D:function(){return eJ},$I:function(){return Q},$o:function(){return tF},AZ:function(){return Y},Au:function(){return e_},BL:function(){return eZ},Br:function(){return b},Bw:function(){return tx},E9:function(){return eY},EB:function(){return th},EG:function(){return eW},EY:function(){return e0},Eb:function(){return N},FC:function(){return eh},Gh:function(){return eG},H1:function(){return A},H2:function(){return r},Hx:function(){return ej},I1:function(){return S},It:function(){return O},J$:function(){return ec},JO:function(){return B},K8:function(){return h},K_:function(){return eQ},Ko:function(){return tN},LY:function(){return eV},Lp:function(){return eR},MO:function(){return p},Mx:function(){return tf},N3:function(){return eb},N8:function(){return ea},NL:function(){return e9},NV:function(){return m},Nc:function(){return eP},Nz:function(){return e1},O3:function(){return eD},OD:function(){return eC},OU:function(){return eu},Of:function(){return F},Og:function(){return y},Ou:function(){return ty},Ov:function(){return C},Oz:function(){return tO},PC:function(){return e6},PT:function(){return K},PY:function(){return tv},Pj:function(){return tt},Pv:function(){return tm},Qg:function(){return ex},RQ:function(){return j},Rg:function(){return et},Sb:function(){return ez},So:function(){return er},TF:function(){return tu},Tj:function(){return e2},Tx:function(){return tB},U8:function(){return te},UM:function(){return tl},VA:function(){return I},Vt:function(){return eX},W_:function(){return M},X:function(){return es},XB:function(){return tg},XO:function(){return T},Xd:function(){return eS},Xm:function(){return x},YU:function(){return eH},Yi:function(){return tj},Yo:function(){return U},Z9:function(){return V},Zr:function(){return k},a6:function(){return P},aC:function(){return tw},ao:function(){return eK},b1:function(){return ew},cq:function(){return R},cu:function(){return eA},e2:function(){return eT},eH:function(){return W},eW:function(){return tC},eZ:function(){return eO},fE:function(){return td},fP:function(){return eo},fk:function(){return tS},g:function(){return e4},gX:function(){return eB},gl:function(){return to},h3:function(){return ed},hT:function(){return ev},hy:function(){return f},ix:function(){return X},j2:function(){return ei},jA:function(){return e$},jE:function(){return eM},jr:function(){return tT},kK:function(){return g},kn:function(){return $},lP:function(){return w},lU:function(){return tr},lg:function(){return eN},mC:function(){return ti},mR:function(){return en},mY:function(){return tc},m_:function(){return D},mp:function(){return eq},n$:function(){return em},n9:function(){return ts},nJ:function(){return tb},nd:function(){return e7},o6:function(){return ee},oC:function(){return eF},ol:function(){return L},pf:function(){return eL},pu:function(){return tk},qI:function(){return _},qW:function(){return t_},qd:function(){return tE},qk:function(){return e8},qm:function(){return u},r1:function(){return tp},r6:function(){return G},rs:function(){return v},s0:function(){return Z},sN:function(){return eI},t$:function(){return J},t0:function(){return eE},t3:function(){return e5},tB:function(){return tn},tN:function(){return ep},u5:function(){return el},v9:function(){return ek},vh:function(){return eU},wX:function(){return E},wd:function(){return eg},xA:function(){return ey},xO:function(){return e3},xX:function(){return z},xZ:function(){return ta},zX:function(){return 
c},zg:function(){return ef}});var a=o(41021);let r=null;console.log=function(){};let n=function(e){let t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:null,o=window.location.origin,a=t||o;console.log("proxyBaseUrl:",r),console.log("serverRootPath:",e),e.length>0&&!a.endsWith(e)&&"/"!=e&&(a+=e,r=a),console.log("Updated proxyBaseUrl:",r)},c=()=>r||window.location.origin,s={GET:"GET",DELETE:"DELETE"},i=0,l=async e=>{let t=Date.now();t-i>6e4?(e.includes("Authentication Error - Expired Key")&&(a.ZP.info("UI Session Expired. Logging out."),i=t,document.cookie="token=; expires=Thu, 01 Jan 1970 00:00:00 UTC; path=/;",window.location.href=window.location.pathname),i=t):console.log("Error suppressed to prevent spam:",e)},d="Authorization";function h(){let e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:"Authorization";console.log("setGlobalLitellmHeaderName: ".concat(e)),d=e}let p=async()=>{console.log("Getting UI config");let e=await fetch("/.well-known/litellm-ui-config"),t=await e.json();return console.log("jsonData in getUiConfig:",t),n(t.server_root_path,t.proxy_base_url),t},w=async()=>{let e=r?"".concat(r,"/openapi.json"):"/openapi.json",t=await fetch(e);return await t.json()},u=async e=>{try{let t=r?"".concat(r,"/get/litellm_model_cost_map"):"/get/litellm_model_cost_map",o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}}),a=await o.json();return console.log("received litellm model cost data: ".concat(a)),a}catch(e){throw console.error("Failed to get model cost map:",e),e}},g=async(e,t)=>{try{let o=r?"".concat(r,"/model/new"):"/model/new",n=await fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({...t})});if(!n.ok){let e=await n.text()||"Network response was not ok";throw a.ZP.error(e),Error(e)}let c=await n.json();return console.log("API Response:",c),a.ZP.destroy(),a.ZP.success("Model ".concat(t.model_name," created successfully"),2),c}catch(e){throw console.error("Failed to create key:",e),e}},f=async e=>{try{let t=r?"".concat(r,"/model/settings"):"/model/settings",o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Network response was not ok")}return await o.json()}catch(e){console.error("Failed to get model settings:",e)}},y=async(e,t)=>{console.log("model_id in model delete call: ".concat(t));try{let o=r?"".concat(r,"/model/delete"):"/model/delete",a=await fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({id:t})});if(!a.ok){let e=await a.text();throw l(e),console.error("Error response from the server:",e),Error("Network response was not ok")}let n=await a.json();return console.log("API Response:",n),n}catch(e){throw console.error("Failed to create key:",e),e}},m=async(e,t)=>{if(console.log("budget_id in budget delete call: ".concat(t)),null!=e)try{let o=r?"".concat(r,"/budget/delete"):"/budget/delete",a=await fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({id:t})});if(!a.ok){let e=await a.text();throw l(e),console.error("Error response from the server:",e),Error("Network response was not ok")}let n=await a.json();return console.log("API Response:",n),n}catch(e){throw console.error("Failed to create key:",e),e}},k=async(e,t)=>{try{console.log("Form Values in budgetCreateCall:",t),console.log("Form Values after check:",t);let 
o=r?"".concat(r,"/budget/new"):"/budget/new",a=await fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({...t})});if(!a.ok){let e=await a.text();throw l(e),console.error("Error response from the server:",e),Error("Network response was not ok")}let n=await a.json();return console.log("API Response:",n),n}catch(e){throw console.error("Failed to create key:",e),e}},_=async(e,t)=>{try{console.log("Form Values in budgetUpdateCall:",t),console.log("Form Values after check:",t);let o=r?"".concat(r,"/budget/update"):"/budget/update",a=await fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({...t})});if(!a.ok){let e=await a.text();throw l(e),console.error("Error response from the server:",e),Error("Network response was not ok")}let n=await a.json();return console.log("API Response:",n),n}catch(e){throw console.error("Failed to create key:",e),e}},T=async(e,t)=>{try{let o=r?"".concat(r,"/invitation/new"):"/invitation/new",a=await fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({user_id:t})});if(!a.ok){let e=await a.text();throw l(e),console.error("Error response from the server:",e),Error("Network response was not ok")}let n=await a.json();return console.log("API Response:",n),n}catch(e){throw console.error("Failed to create key:",e),e}},j=async e=>{try{let t=r?"".concat(r,"/alerting/settings"):"/alerting/settings",o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Network response was not ok")}return await o.json()}catch(e){throw console.error("Failed to get callbacks:",e),e}},E=async(e,t,o)=>{try{if(console.log("Form Values in keyCreateCall:",o),o.description&&(o.metadata||(o.metadata={}),o.metadata.description=o.description,delete o.description,o.metadata=JSON.stringify(o.metadata)),o.metadata){console.log("formValues.metadata:",o.metadata);try{o.metadata=JSON.parse(o.metadata)}catch(e){throw Error("Failed to parse metadata: "+e)}}console.log("Form Values after check:",o);let a=r?"".concat(r,"/key/generate"):"/key/generate",n=await fetch(a,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({user_id:t,...o})});if(!n.ok){let e=await n.text();throw l(e),console.error("Error response from the server:",e),Error(e)}let c=await n.json();return console.log("API Response:",c),c}catch(e){throw console.error("Failed to create key:",e),e}},C=async(e,t,o)=>{try{if(console.log("Form Values in keyCreateCall:",o),o.description&&(o.metadata||(o.metadata={}),o.metadata.description=o.description,delete o.description,o.metadata=JSON.stringify(o.metadata)),o.auto_create_key=!1,o.metadata){console.log("formValues.metadata:",o.metadata);try{o.metadata=JSON.parse(o.metadata)}catch(e){throw Error("Failed to parse metadata: "+e)}}console.log("Form Values after check:",o);let a=r?"".concat(r,"/user/new"):"/user/new",n=await fetch(a,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({user_id:t,...o})});if(!n.ok){let e=await n.text();throw l(e),console.error("Error response from the server:",e),Error(e)}let c=await n.json();return console.log("API Response:",c),c}catch(e){throw console.error("Failed to create key:",e),e}},S=async(e,t)=>{try{let o=r?"".concat(r,"/key/delete"):"/key/delete";console.log("in keyDeleteCall:",t);let a=await 
fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({keys:[t]})});if(!a.ok){let e=await a.text();throw l(e),Error("Network response was not ok")}let n=await a.json();return console.log(n),n}catch(e){throw console.error("Failed to create key:",e),e}},N=async(e,t)=>{try{let o=r?"".concat(r,"/user/delete"):"/user/delete";console.log("in userDeleteCall:",t);let a=await fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({user_ids:t})});if(!a.ok){let e=await a.text();throw l(e),Error("Network response was not ok")}let n=await a.json();return console.log(n),n}catch(e){throw console.error("Failed to delete user(s):",e),e}},v=async(e,t)=>{try{let o=r?"".concat(r,"/team/delete"):"/team/delete";console.log("in teamDeleteCall:",t);let a=await fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({team_ids:[t]})});if(!a.ok){let e=await a.text();throw l(e),Error("Network response was not ok")}let n=await a.json();return console.log(n),n}catch(e){throw console.error("Failed to delete key:",e),e}},F=async function(e){let t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:null,o=arguments.length>2&&void 0!==arguments[2]?arguments[2]:null,a=arguments.length>3&&void 0!==arguments[3]?arguments[3]:null,n=arguments.length>4&&void 0!==arguments[4]?arguments[4]:null,c=arguments.length>5&&void 0!==arguments[5]?arguments[5]:null,s=arguments.length>6&&void 0!==arguments[6]?arguments[6]:null,i=arguments.length>7&&void 0!==arguments[7]?arguments[7]:null,h=arguments.length>8&&void 0!==arguments[8]?arguments[8]:null,p=arguments.length>9&&void 0!==arguments[9]?arguments[9]:null;try{let w=r?"".concat(r,"/user/list"):"/user/list";console.log("in userListCall");let u=new URLSearchParams;if(t&&t.length>0){let e=t.join(",");u.append("user_ids",e)}o&&u.append("page",o.toString()),a&&u.append("page_size",a.toString()),n&&u.append("user_email",n),c&&u.append("role",c),s&&u.append("team",s),i&&u.append("sso_user_ids",i),h&&u.append("sort_by",h),p&&u.append("sort_order",p);let g=u.toString();g&&(w+="?".concat(g));let f=await fetch(w,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!f.ok){let e=await f.text();throw l(e),Error("Network response was not ok")}let y=await f.json();return console.log("/user/list API Response:",y),y}catch(e){throw console.error("Failed to create key:",e),e}},b=async function(e,t,o){let a=arguments.length>3&&void 0!==arguments[3]&&arguments[3],n=arguments.length>4?arguments[4]:void 0,c=arguments.length>5?arguments[5]:void 0,s=arguments.length>6&&void 0!==arguments[6]&&arguments[6];console.log("userInfoCall: ".concat(t,", ").concat(o,", ").concat(a,", ").concat(n,", ").concat(c,", ").concat(s));try{let i;if(a){i=r?"".concat(r,"/user/list"):"/user/list";let e=new URLSearchParams;null!=n&&e.append("page",n.toString()),null!=c&&e.append("page_size",c.toString()),i+="?".concat(e.toString())}else i=r?"".concat(r,"/user/info"):"/user/info",("Admin"!==o&&"Admin Viewer"!==o||s)&&t&&(i+="?user_id=".concat(t));console.log("Requesting user data from:",i);let h=await fetch(i,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!h.ok){let e=await h.text();throw l(e),Error("Network response was not ok")}let p=await h.json();return console.log("API Response:",p),p}catch(e){throw console.error("Failed to fetch user data:",e),e}},x=async(e,t)=>{try{let 
o=r?"".concat(r,"/team/info"):"/team/info";t&&(o="".concat(o,"?team_id=").concat(t)),console.log("in teamInfoCall");let a=await fetch(o,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!a.ok){let e=await a.text();throw l(e),Error("Network response was not ok")}let n=await a.json();return console.log("API Response:",n),n}catch(e){throw console.error("Failed to create key:",e),e}},B=async function(e,t){let o=arguments.length>2&&void 0!==arguments[2]?arguments[2]:null,a=arguments.length>3&&void 0!==arguments[3]?arguments[3]:null,n=arguments.length>4&&void 0!==arguments[4]?arguments[4]:null;arguments.length>5&&void 0!==arguments[5]&&arguments[5],arguments.length>6&&void 0!==arguments[6]&&arguments[6],arguments.length>7&&void 0!==arguments[7]&&arguments[7],arguments.length>8&&void 0!==arguments[8]&&arguments[8];try{let c=r?"".concat(r,"/v2/team/list"):"/v2/team/list";console.log("in teamInfoCall");let s=new URLSearchParams;o&&s.append("user_id",o.toString()),t&&s.append("organization_id",t.toString()),a&&s.append("team_id",a.toString()),n&&s.append("team_alias",n.toString());let i=s.toString();i&&(c+="?".concat(i));let h=await fetch(c,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!h.ok){let e=await h.text();throw l(e),Error("Network response was not ok")}let p=await h.json();return console.log("/v2/team/list API Response:",p),p}catch(e){throw console.error("Failed to create key:",e),e}},O=async function(e,t){let o=arguments.length>2&&void 0!==arguments[2]?arguments[2]:null,a=arguments.length>3&&void 0!==arguments[3]?arguments[3]:null,n=arguments.length>4&&void 0!==arguments[4]?arguments[4]:null;try{let c=r?"".concat(r,"/team/list"):"/team/list";console.log("in teamInfoCall");let s=new URLSearchParams;o&&s.append("user_id",o.toString()),t&&s.append("organization_id",t.toString()),a&&s.append("team_id",a.toString()),n&&s.append("team_alias",n.toString());let i=s.toString();i&&(c+="?".concat(i));let h=await fetch(c,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!h.ok){let e=await h.text();throw l(e),Error("Network response was not ok")}let p=await h.json();return console.log("/team/list API Response:",p),p}catch(e){throw console.error("Failed to create key:",e),e}},P=async e=>{try{let t=r?"".concat(r,"/team/available"):"/team/available";console.log("in availableTeamListCall");let o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Network response was not ok")}let a=await o.json();return console.log("/team/available_teams API Response:",a),a}catch(e){throw e}},G=async e=>{try{let t=r?"".concat(r,"/organization/list"):"/organization/list",o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Network response was not ok")}return await o.json()}catch(e){throw console.error("Failed to create key:",e),e}},J=async(e,t)=>{try{let o=r?"".concat(r,"/organization/info"):"/organization/info";t&&(o="".concat(o,"?organization_id=").concat(t)),console.log("in teamInfoCall");let a=await fetch(o,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!a.ok){let e=await a.text();throw l(e),Error("Network response was not ok")}let n=await a.json();return console.log("API Response:",n),n}catch(e){throw console.error("Failed to create 
key:",e),e}},A=async(e,t)=>{try{if(console.log("Form Values in organizationCreateCall:",t),t.metadata){console.log("formValues.metadata:",t.metadata);try{t.metadata=JSON.parse(t.metadata)}catch(e){throw console.error("Failed to parse metadata:",e),Error("Failed to parse metadata: "+e)}}let o=r?"".concat(r,"/organization/new"):"/organization/new",a=await fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({...t})});if(!a.ok){let e=await a.text();throw l(e),console.error("Error response from the server:",e),Error("Network response was not ok")}let n=await a.json();return console.log("API Response:",n),n}catch(e){throw console.error("Failed to create key:",e),e}},I=async(e,t)=>{try{console.log("Form Values in organizationUpdateCall:",t);let o=r?"".concat(r,"/organization/update"):"/organization/update",a=await fetch(o,{method:"PATCH",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({...t})});if(!a.ok){let e=await a.text();throw l(e),console.error("Error response from the server:",e),Error("Network response was not ok")}let n=await a.json();return console.log("Update Team Response:",n),n}catch(e){throw console.error("Failed to create key:",e),e}},R=async(e,t)=>{try{let o=r?"".concat(r,"/organization/delete"):"/organization/delete",a=await fetch(o,{method:"DELETE",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({organization_ids:[t]})});if(!a.ok){let e=await a.text();throw l(e),Error("Error deleting organization: ".concat(e))}return await a.json()}catch(e){throw console.error("Failed to delete organization:",e),e}},U=async(e,t)=>{try{let o=r?"".concat(r,"/utils/transform_request"):"/utils/transform_request",a=await fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify(t)});if(!a.ok){let e=await a.text();throw l(e),Error("Network response was not ok")}return await a.json()}catch(e){throw console.error("Failed to create key:",e),e}},z=async function(e,t,o){let a=arguments.length>3&&void 0!==arguments[3]?arguments[3]:1;try{let n=r?"".concat(r,"/user/daily/activity"):"/user/daily/activity",c=new URLSearchParams,s=e=>{let t=e.getFullYear(),o=String(e.getMonth()+1).padStart(2,"0"),a=String(e.getDate()).padStart(2,"0");return"".concat(t,"-").concat(o,"-").concat(a)};c.append("start_date",s(t)),c.append("end_date",s(o)),c.append("page_size","1000"),c.append("page",a.toString());let i=c.toString();i&&(n+="?".concat(i));let h=await fetch(n,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!h.ok){let e=await h.text();throw l(e),Error("Network response was not ok")}return await h.json()}catch(e){throw console.error("Failed to create key:",e),e}},V=async function(e,t,o){let a=arguments.length>3&&void 0!==arguments[3]?arguments[3]:1,n=arguments.length>4&&void 0!==arguments[4]?arguments[4]:null;try{let c=r?"".concat(r,"/tag/daily/activity"):"/tag/daily/activity",s=new URLSearchParams,i=e=>{let t=e.getFullYear(),o=String(e.getMonth()+1).padStart(2,"0"),a=String(e.getDate()).padStart(2,"0");return"".concat(t,"-").concat(o,"-").concat(a)};s.append("start_date",i(t)),s.append("end_date",i(o)),s.append("page_size","1000"),s.append("page",a.toString()),n&&s.append("tags",n.join(","));let h=s.toString();h&&(c+="?".concat(h));let p=await fetch(c,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!p.ok){let e=await p.text();throw l(e),Error("Network 
response was not ok")}return await p.json()}catch(e){throw console.error("Failed to create key:",e),e}},L=async function(e,t,o){let a=arguments.length>3&&void 0!==arguments[3]?arguments[3]:1,n=arguments.length>4&&void 0!==arguments[4]?arguments[4]:null;try{let c=r?"".concat(r,"/team/daily/activity"):"/team/daily/activity",s=new URLSearchParams,i=e=>{let t=e.getFullYear(),o=String(e.getMonth()+1).padStart(2,"0"),a=String(e.getDate()).padStart(2,"0");return"".concat(t,"-").concat(o,"-").concat(a)};s.append("start_date",i(t)),s.append("end_date",i(o)),s.append("page_size","1000"),s.append("page",a.toString()),n&&s.append("team_ids",n.join(",")),s.append("exclude_team_ids","litellm-dashboard");let h=s.toString();h&&(c+="?".concat(h));let p=await fetch(c,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!p.ok){let e=await p.text();throw l(e),Error("Network response was not ok")}return await p.json()}catch(e){throw console.error("Failed to create key:",e),e}},M=async e=>{try{let t=r?"".concat(r,"/onboarding/get_token"):"/onboarding/get_token";t+="?invite_link=".concat(e);let o=await fetch(t,{method:"GET",headers:{"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Network response was not ok")}return await o.json()}catch(e){throw console.error("Failed to create key:",e),e}},D=async(e,t,o,a)=>{let n=r?"".concat(r,"/onboarding/claim_token"):"/onboarding/claim_token";try{let r=await fetch(n,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({invitation_link:t,user_id:o,password:a})});if(!r.ok){let e=await r.text();throw l(e),Error("Network response was not ok")}let c=await r.json();return console.log(c),c}catch(e){throw console.error("Failed to delete key:",e),e}},Z=async(e,t,o)=>{try{let a=r?"".concat(r,"/key/").concat(t,"/regenerate"):"/key/".concat(t,"/regenerate"),n=await fetch(a,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify(o)});if(!n.ok){let e=await n.text();throw l(e),Error("Network response was not ok")}let c=await n.json();return console.log("Regenerate key Response:",c),c}catch(e){throw console.error("Failed to regenerate key:",e),e}},H=!1,q=null,Y=async(e,t,o)=>{try{console.log("modelInfoCall:",e,t,o);let n=r?"".concat(r,"/v2/model/info"):"/v2/model/info",c=new URLSearchParams;c.append("include_team_models","true"),c.toString()&&(n+="?".concat(c.toString()));let s=await fetch(n,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!s.ok){let e=await s.text();throw e+="error shown=".concat(H),H||(e.includes("No model list passed")&&(e="No Models Exist. 
Click Add Model to get started."),a.ZP.info(e,10),H=!0,q&&clearTimeout(q),q=setTimeout(()=>{H=!1},1e4)),Error("Network response was not ok")}let i=await s.json();return console.log("modelInfoCall:",i),i}catch(e){throw console.error("Failed to create key:",e),e}},X=async(e,t)=>{try{let o=r?"".concat(r,"/v1/model/info"):"/v1/model/info";o+="?litellm_model_id=".concat(t);let a=await fetch(o,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!a.ok)throw await a.text(),Error("Network response was not ok");let n=await a.json();return console.log("modelInfoV1Call:",n),n}catch(e){throw console.error("Failed to create key:",e),e}},$=async e=>{try{let t=r?"".concat(r,"/model_group/info"):"/model_group/info",o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok)throw await o.text(),Error("Network response was not ok");let a=await o.json();return console.log("modelHubCall:",a),a}catch(e){throw console.error("Failed to create key:",e),e}},K=async e=>{try{let t=r?"".concat(r,"/get/allowed_ips"):"/get/allowed_ips",o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw Error("Network response was not ok: ".concat(e))}let a=await o.json();return console.log("getAllowedIPs:",a),a.data}catch(e){throw console.error("Failed to get allowed IPs:",e),e}},W=async(e,t)=>{try{let o=r?"".concat(r,"/add/allowed_ip"):"/add/allowed_ip",a=await fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({ip:t})});if(!a.ok){let e=await a.text();throw Error("Network response was not ok: ".concat(e))}let n=await a.json();return console.log("addAllowedIP:",n),n}catch(e){throw console.error("Failed to add allowed IP:",e),e}},Q=async(e,t)=>{try{let o=r?"".concat(r,"/delete/allowed_ip"):"/delete/allowed_ip",a=await fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({ip:t})});if(!a.ok){let e=await a.text();throw Error("Network response was not ok: ".concat(e))}let n=await a.json();return console.log("deleteAllowedIP:",n),n}catch(e){throw console.error("Failed to delete allowed IP:",e),e}},ee=async(e,t,o,a,n,c,s,i)=>{try{let t=r?"".concat(r,"/model/metrics"):"/model/metrics";a&&(t="".concat(t,"?_selected_model_group=").concat(a,"&startTime=").concat(n,"&endTime=").concat(c,"&api_key=").concat(s,"&customer=").concat(i));let o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Network response was not ok")}return await o.json()}catch(e){throw console.error("Failed to create key:",e),e}},et=async(e,t,o,a)=>{try{let n=r?"".concat(r,"/model/streaming_metrics"):"/model/streaming_metrics";t&&(n="".concat(n,"?_selected_model_group=").concat(t,"&startTime=").concat(o,"&endTime=").concat(a));let c=await fetch(n,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!c.ok){let e=await c.text();throw l(e),Error("Network response was not ok")}return await c.json()}catch(e){throw console.error("Failed to create key:",e),e}},eo=async(e,t,o,a,n,c,s,i)=>{try{let t=r?"".concat(r,"/model/metrics/slow_responses"):"/model/metrics/slow_responses";a&&(t="".concat(t,"?_selected_model_group=").concat(a,"&startTime=").concat(n,"&endTime=").concat(c,"&api_key=").concat(s,"&customer=").concat(i));let o=await fetch(t,{method:"GET",headers:{[d]:"Bearer 
".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Network response was not ok")}return await o.json()}catch(e){throw console.error("Failed to create key:",e),e}},ea=async(e,t,o,a,n,c,s,i)=>{try{let t=r?"".concat(r,"/model/metrics/exceptions"):"/model/metrics/exceptions";a&&(t="".concat(t,"?_selected_model_group=").concat(a,"&startTime=").concat(n,"&endTime=").concat(c,"&api_key=").concat(s,"&customer=").concat(i));let o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Network response was not ok")}return await o.json()}catch(e){throw console.error("Failed to create key:",e),e}},er=async function(e,t,o){let a=arguments.length>3&&void 0!==arguments[3]&&arguments[3],n=arguments.length>4&&void 0!==arguments[4]?arguments[4]:null,c=(arguments.length>5&&void 0!==arguments[5]&&arguments[5],arguments.length>6&&void 0!==arguments[6]&&arguments[6]);console.log("in /models calls, globalLitellmHeaderName",d);try{let t=r?"".concat(r,"/models"):"/models",o=new URLSearchParams;o.append("include_model_access_groups","True"),!0===a&&o.append("return_wildcard_routes","True"),!0===c&&o.append("only_model_access_groups","True"),n&&o.append("team_id",n.toString()),o.toString()&&(t+="?".concat(o.toString()));let s=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!s.ok){let e=await s.text();throw l(e),Error("Network response was not ok")}return await s.json()}catch(e){throw console.error("Failed to create key:",e),e}},en=async e=>{try{let t=r?"".concat(r,"/global/spend/teams"):"/global/spend/teams";console.log("in teamSpendLogsCall:",t);let o=await fetch("".concat(t),{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Network response was not ok")}let a=await o.json();return console.log(a),a}catch(e){throw console.error("Failed to create key:",e),e}},ec=async(e,t,o,a)=>{try{let n=r?"".concat(r,"/global/spend/tags"):"/global/spend/tags";t&&o&&(n="".concat(n,"?start_date=").concat(t,"&end_date=").concat(o)),a&&(n+="".concat(n,"&tags=").concat(a.join(","))),console.log("in tagsSpendLogsCall:",n);let c=await fetch("".concat(n),{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!c.ok)throw await c.text(),Error("Network response was not ok");let s=await c.json();return console.log(s),s}catch(e){throw console.error("Failed to create key:",e),e}},es=async e=>{try{let t=r?"".concat(r,"/global/spend/all_tag_names"):"/global/spend/all_tag_names";console.log("in global/spend/all_tag_names call",t);let o=await fetch("".concat(t),{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok)throw await o.text(),Error("Network response was not ok");let a=await o.json();return console.log(a),a}catch(e){throw console.error("Failed to create key:",e),e}},ei=async e=>{try{let t=r?"".concat(r,"/global/all_end_users"):"/global/all_end_users";console.log("in global/all_end_users call",t);let o=await fetch("".concat(t),{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok)throw await o.text(),Error("Network response was not ok");let a=await o.json();return console.log(a),a}catch(e){throw console.error("Failed to create key:",e),e}},el=async(e,t)=>{try{let 
o=r?"".concat(r,"/user/filter/ui"):"/user/filter/ui";t.get("user_email")&&(o+="?user_email=".concat(t.get("user_email"))),t.get("user_id")&&(o+="?user_id=".concat(t.get("user_id")));let a=await fetch(o,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!a.ok){let e=await a.text();throw l(e),Error("Network response was not ok")}return await a.json()}catch(e){throw console.error("Failed to create key:",e),e}},ed=async(e,t,o,a,n,c,s,i,h,p,w)=>{try{let u=r?"".concat(r,"/spend/logs/ui"):"/spend/logs/ui",g=new URLSearchParams;t&&g.append("api_key",t),o&&g.append("team_id",o),a&&g.append("request_id",a),n&&g.append("start_date",n),c&&g.append("end_date",c),s&&g.append("page",s.toString()),i&&g.append("page_size",i.toString()),h&&g.append("user_id",h),p&&g.append("status_filter",p),w&&g.append("model",w);let f=g.toString();f&&(u+="?".concat(f));let y=await fetch(u,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!y.ok){let e=await y.text();throw l(e),Error("Network response was not ok")}let m=await y.json();return console.log("Spend Logs Response:",m),m}catch(e){throw console.error("Failed to fetch spend logs:",e),e}},eh=async e=>{try{let t=r?"".concat(r,"/global/spend/logs"):"/global/spend/logs",o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Network response was not ok")}let a=await o.json();return console.log(a),a}catch(e){throw console.error("Failed to create key:",e),e}},ep=async e=>{try{let t=r?"".concat(r,"/global/spend/keys?limit=5"):"/global/spend/keys?limit=5",o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Network response was not ok")}let a=await o.json();return console.log(a),a}catch(e){throw console.error("Failed to create key:",e),e}},ew=async(e,t,o,a)=>{try{let n=r?"".concat(r,"/global/spend/end_users"):"/global/spend/end_users",c="";c=t?JSON.stringify({api_key:t,startTime:o,endTime:a}):JSON.stringify({startTime:o,endTime:a});let s={method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:c},i=await fetch(n,s);if(!i.ok){let e=await i.text();throw l(e),Error("Network response was not ok")}let h=await i.json();return console.log(h),h}catch(e){throw console.error("Failed to create key:",e),e}},eu=async(e,t,o,a)=>{try{let n=r?"".concat(r,"/global/spend/provider"):"/global/spend/provider";o&&a&&(n+="?start_date=".concat(o,"&end_date=").concat(a)),t&&(n+="&api_key=".concat(t));let c={method:"GET",headers:{[d]:"Bearer ".concat(e)}},s=await fetch(n,c);if(!s.ok){let e=await s.text();throw l(e),Error("Network response was not ok")}let i=await s.json();return console.log(i),i}catch(e){throw console.error("Failed to fetch spend data:",e),e}},eg=async(e,t,o)=>{try{let a=r?"".concat(r,"/global/activity"):"/global/activity";t&&o&&(a+="?start_date=".concat(t,"&end_date=").concat(o));let n={method:"GET",headers:{[d]:"Bearer ".concat(e)}},c=await fetch(a,n);if(!c.ok)throw await c.text(),Error("Network response was not ok");let s=await c.json();return console.log(s),s}catch(e){throw console.error("Failed to fetch spend data:",e),e}},ef=async(e,t,o)=>{try{let a=r?"".concat(r,"/global/activity/cache_hits"):"/global/activity/cache_hits";t&&o&&(a+="?start_date=".concat(t,"&end_date=").concat(o));let n={method:"GET",headers:{[d]:"Bearer ".concat(e)}},c=await fetch(a,n);if(!c.ok)throw await 
c.text(),Error("Network response was not ok");let s=await c.json();return console.log(s),s}catch(e){throw console.error("Failed to fetch spend data:",e),e}},ey=async(e,t,o)=>{try{let a=r?"".concat(r,"/global/activity/model"):"/global/activity/model";t&&o&&(a+="?start_date=".concat(t,"&end_date=").concat(o));let n={method:"GET",headers:{[d]:"Bearer ".concat(e)}},c=await fetch(a,n);if(!c.ok)throw await c.text(),Error("Network response was not ok");let s=await c.json();return console.log(s),s}catch(e){throw console.error("Failed to fetch spend data:",e),e}},em=async(e,t,o,a)=>{try{let n=r?"".concat(r,"/global/activity/exceptions"):"/global/activity/exceptions";t&&o&&(n+="?start_date=".concat(t,"&end_date=").concat(o)),a&&(n+="&model_group=".concat(a));let c={method:"GET",headers:{[d]:"Bearer ".concat(e)}},s=await fetch(n,c);if(!s.ok)throw await s.text(),Error("Network response was not ok");let i=await s.json();return console.log(i),i}catch(e){throw console.error("Failed to fetch spend data:",e),e}},ek=async(e,t,o,a)=>{try{let n=r?"".concat(r,"/global/activity/exceptions/deployment"):"/global/activity/exceptions/deployment";t&&o&&(n+="?start_date=".concat(t,"&end_date=").concat(o)),a&&(n+="&model_group=".concat(a));let c={method:"GET",headers:{[d]:"Bearer ".concat(e)}},s=await fetch(n,c);if(!s.ok)throw await s.text(),Error("Network response was not ok");let i=await s.json();return console.log(i),i}catch(e){throw console.error("Failed to fetch spend data:",e),e}},e_=async e=>{try{let t=r?"".concat(r,"/global/spend/models?limit=5"):"/global/spend/models?limit=5",o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Network response was not ok")}let a=await o.json();return console.log(a),a}catch(e){throw console.error("Failed to create key:",e),e}},eT=async(e,t)=>{try{let o=r?"".concat(r,"/v2/key/info"):"/v2/key/info",a=await fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({keys:t})});if(!a.ok){let e=await a.text();if(e.includes("Invalid proxy server token passed"))throw Error("Invalid proxy server token passed");throw l(e),Error("Network response was not ok")}let n=await a.json();return console.log(n),n}catch(e){throw console.error("Failed to create key:",e),e}},ej=async(e,t,o)=>{try{console.log("Sending model connection test request:",JSON.stringify(t));let n=r?"".concat(r,"/health/test_connection"):"/health/test_connection",c=await fetch(n,{method:"POST",headers:{"Content-Type":"application/json",[d]:"Bearer ".concat(e)},body:JSON.stringify({litellm_params:t,mode:o})}),s=c.headers.get("content-type");if(!s||!s.includes("application/json")){let e=await c.text();throw console.error("Received non-JSON response:",e),Error("Received non-JSON response (".concat(c.status,": ").concat(c.statusText,"). 
Check network tab for details."))}let i=await c.json();if(!c.ok||"error"===i.status){if("error"===i.status);else{var a;return{status:"error",message:(null===(a=i.error)||void 0===a?void 0:a.message)||"Connection test failed: ".concat(c.status," ").concat(c.statusText)}}}return i}catch(e){throw console.error("Model connection test error:",e),e}},eE=async(e,t)=>{try{console.log("entering keyInfoV1Call");let o=r?"".concat(r,"/key/info"):"/key/info";o="".concat(o,"?key=").concat(t);let n=await fetch(o,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(console.log("response",n),!n.ok){let e=await n.text();l(e),a.ZP.error("Failed to fetch key info - "+e)}let c=await n.json();return console.log("data",c),c}catch(e){throw console.error("Failed to fetch key info:",e),e}},eC=async function(e,t,o,a,n,c,s,i){let h=arguments.length>8&&void 0!==arguments[8]?arguments[8]:null,p=arguments.length>9&&void 0!==arguments[9]?arguments[9]:null;try{let w=r?"".concat(r,"/key/list"):"/key/list";console.log("in keyListCall");let u=new URLSearchParams;o&&u.append("team_id",o.toString()),t&&u.append("organization_id",t.toString()),a&&u.append("key_alias",a),c&&u.append("key_hash",c),n&&u.append("user_id",n.toString()),s&&u.append("page",s.toString()),i&&u.append("size",i.toString()),h&&u.append("sort_by",h),p&&u.append("sort_order",p),u.append("return_full_object","true"),u.append("include_team_keys","true");let g=u.toString();g&&(w+="?".concat(g));let f=await fetch(w,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!f.ok){let e=await f.text();throw l(e),Error("Network response was not ok")}let y=await f.json();return console.log("/team/list API Response:",y),y}catch(e){throw console.error("Failed to create key:",e),e}},eS=async(e,t)=>{try{let o=r?"".concat(r,"/user/get_users?role=").concat(t):"/user/get_users?role=".concat(t);console.log("in userGetAllUsersCall:",o);let a=await fetch(o,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!a.ok){let e=await a.text();throw l(e),Error("Network response was not ok")}let n=await a.json();return console.log(n),n}catch(e){throw console.error("Failed to get requested models:",e),e}},eN=async e=>{try{let t=r?"".concat(r,"/user/available_roles"):"/user/available_roles",o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok)throw await o.text(),Error("Network response was not ok");let a=await o.json();return console.log("response from user/available_role",a),a}catch(e){throw e}},ev=async(e,t)=>{try{if(console.log("Form Values in teamCreateCall:",t),t.metadata){console.log("formValues.metadata:",t.metadata);try{t.metadata=JSON.parse(t.metadata)}catch(e){throw Error("Failed to parse metadata: "+e)}}let o=r?"".concat(r,"/team/new"):"/team/new",a=await fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({...t})});if(!a.ok){let e=await a.text();throw l(e),console.error("Error response from the server:",e),Error("Network response was not ok")}let n=await a.json();return console.log("API Response:",n),n}catch(e){throw console.error("Failed to create key:",e),e}},eF=async(e,t)=>{try{if(console.log("Form Values in credentialCreateCall:",t),t.metadata){console.log("formValues.metadata:",t.metadata);try{t.metadata=JSON.parse(t.metadata)}catch(e){throw Error("Failed to parse metadata: "+e)}}let o=r?"".concat(r,"/credentials"):"/credentials",a=await 
fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({...t})});if(!a.ok){let e=await a.text();throw l(e),console.error("Error response from the server:",e),Error("Network response was not ok")}let n=await a.json();return console.log("API Response:",n),n}catch(e){throw console.error("Failed to create key:",e),e}},eb=async e=>{try{let t=r?"".concat(r,"/credentials"):"/credentials";console.log("in credentialListCall");let o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Network response was not ok")}let a=await o.json();return console.log("/credentials API Response:",a),a}catch(e){throw console.error("Failed to create key:",e),e}},ex=async(e,t,o)=>{try{let a=r?"".concat(r,"/credentials"):"/credentials";t?a+="/by_name/".concat(t):o&&(a+="/by_model/".concat(o)),console.log("in credentialListCall");let n=await fetch(a,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!n.ok){let e=await n.text();throw l(e),Error("Network response was not ok")}let c=await n.json();return console.log("/credentials API Response:",c),c}catch(e){throw console.error("Failed to create key:",e),e}},eB=async(e,t)=>{try{let o=r?"".concat(r,"/credentials/").concat(t):"/credentials/".concat(t);console.log("in credentialDeleteCall:",t);let a=await fetch(o,{method:"DELETE",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!a.ok){let e=await a.text();throw l(e),Error("Network response was not ok")}let n=await a.json();return console.log(n),n}catch(e){throw console.error("Failed to delete key:",e),e}},eO=async(e,t,o)=>{try{if(console.log("Form Values in credentialUpdateCall:",o),o.metadata){console.log("formValues.metadata:",o.metadata);try{o.metadata=JSON.parse(o.metadata)}catch(e){throw Error("Failed to parse metadata: "+e)}}let a=r?"".concat(r,"/credentials/").concat(t):"/credentials/".concat(t),n=await fetch(a,{method:"PATCH",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({...o})});if(!n.ok){let e=await n.text();throw l(e),console.error("Error response from the server:",e),Error("Network response was not ok")}let c=await n.json();return console.log("API Response:",c),c}catch(e){throw console.error("Failed to create key:",e),e}},eP=async(e,t)=>{try{if(console.log("Form Values in keyUpdateCall:",t),t.model_tpm_limit){console.log("formValues.model_tpm_limit:",t.model_tpm_limit);try{t.model_tpm_limit=JSON.parse(t.model_tpm_limit)}catch(e){throw Error("Failed to parse model_tpm_limit: "+e)}}if(t.model_rpm_limit){console.log("formValues.model_rpm_limit:",t.model_rpm_limit);try{t.model_rpm_limit=JSON.parse(t.model_rpm_limit)}catch(e){throw Error("Failed to parse model_rpm_limit: "+e)}}let o=r?"".concat(r,"/key/update"):"/key/update",a=await fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({...t})});if(!a.ok){let e=await a.text();throw l(e),console.error("Error response from the server:",e),Error("Network response was not ok")}let n=await a.json();return console.log("Update key Response:",n),n}catch(e){throw console.error("Failed to create key:",e),e}},eG=async(e,t)=>{try{console.log("Form Values in teamUpateCall:",t);let o=r?"".concat(r,"/team/update"):"/team/update",n=await fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({...t})});if(!n.ok){let e=await 
n.text();throw l(e),console.error("Error response from the server:",e),a.ZP.error("Failed to update team settings: "+e),Error(e)}let c=await n.json();return console.log("Update Team Response:",c),c}catch(e){throw console.error("Failed to update team:",e),e}},eJ=async(e,t,o)=>{try{console.log("Form Values in modelUpateCall:",t);let a=r?"".concat(r,"/model/").concat(o,"/update"):"/model/".concat(o,"/update"),n=await fetch(a,{method:"PATCH",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({...t})});if(!n.ok){let e=await n.text();throw l(e),console.error("Error update from the server:",e),Error("Network response was not ok")}let c=await n.json();return console.log("Update model Response:",c),c}catch(e){throw console.error("Failed to update model:",e),e}},eA=async(e,t,o)=>{try{console.log("Form Values in teamMemberAddCall:",o);let n=r?"".concat(r,"/team/member_add"):"/team/member_add",c=await fetch(n,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({team_id:t,member:o})});if(!c.ok){var a;let e=await c.text(),t={};try{t=JSON.parse(e)}catch(t){console.warn("Failed to parse error body as JSON:",e)}let o=(null==t?void 0:null===(a=t.detail)||void 0===a?void 0:a.error)||"Failed to add team member",r=Error(o);throw r.raw=t,r}let s=await c.json();return console.log("API Response:",s),s}catch(e){throw console.error("Failed to create key:",e),e}},eI=async(e,t,o)=>{try{console.log("Form Values in teamMemberUpdateCall:",o);let n=r?"".concat(r,"/team/member_update"):"/team/member_update",c=await fetch(n,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({team_id:t,role:o.role,user_id:o.user_id})});if(!c.ok){var a;let e=await c.text(),t={};try{t=JSON.parse(e)}catch(t){console.warn("Failed to parse error body as JSON:",e)}let o=(null==t?void 0:null===(a=t.detail)||void 0===a?void 0:a.error)||"Failed to add team member",r=Error(o);throw r.raw=t,r}let s=await c.json();return console.log("API Response:",s),s}catch(e){throw console.error("Failed to update team member:",e),e}},eR=async(e,t,o)=>{try{console.log("Form Values in teamMemberAddCall:",o);let a=r?"".concat(r,"/team/member_delete"):"/team/member_delete",n=await fetch(a,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({team_id:t,...void 0!==o.user_email&&{user_email:o.user_email},...void 0!==o.user_id&&{user_id:o.user_id}})});if(!n.ok){let e=await n.text();throw l(e),console.error("Error response from the server:",e),Error("Network response was not ok")}let c=await n.json();return console.log("API Response:",c),c}catch(e){throw console.error("Failed to create key:",e),e}},eU=async(e,t,o)=>{try{console.log("Form Values in teamMemberAddCall:",o);let a=r?"".concat(r,"/organization/member_add"):"/organization/member_add",n=await fetch(a,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({organization_id:t,member:o})});if(!n.ok){let e=await n.text();throw l(e),console.error("Error response from the server:",e),Error(e)}let c=await n.json();return console.log("API Response:",c),c}catch(e){throw console.error("Failed to create organization member:",e),e}},ez=async(e,t,o)=>{try{console.log("Form Values in organizationMemberDeleteCall:",o);let a=r?"".concat(r,"/organization/member_delete"):"/organization/member_delete",n=await fetch(a,{method:"DELETE",headers:{[d]:"Bearer 
".concat(e),"Content-Type":"application/json"},body:JSON.stringify({organization_id:t,user_id:o})});if(!n.ok){let e=await n.text();throw l(e),console.error("Error response from the server:",e),Error("Network response was not ok")}let c=await n.json();return console.log("API Response:",c),c}catch(e){throw console.error("Failed to delete organization member:",e),e}},eV=async(e,t,o)=>{try{console.log("Form Values in organizationMemberUpdateCall:",o);let a=r?"".concat(r,"/organization/member_update"):"/organization/member_update",n=await fetch(a,{method:"PATCH",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({organization_id:t,...o})});if(!n.ok){let e=await n.text();throw l(e),console.error("Error response from the server:",e),Error("Network response was not ok")}let c=await n.json();return console.log("API Response:",c),c}catch(e){throw console.error("Failed to update organization member:",e),e}},eL=async(e,t,o)=>{try{console.log("Form Values in userUpdateUserCall:",t);let a=r?"".concat(r,"/user/update"):"/user/update",n={...t};null!==o&&(n.user_role=o),n=JSON.stringify(n);let c=await fetch(a,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:n});if(!c.ok){let e=await c.text();throw l(e),console.error("Error response from the server:",e),Error("Network response was not ok")}let s=await c.json();return console.log("API Response:",s),s}catch(e){throw console.error("Failed to create key:",e),e}},eM=async(e,t)=>{try{let o=r?"".concat(r,"/health/services?service=").concat(t):"/health/services?service=".concat(t);console.log("Checking Slack Budget Alerts service health");let n=await fetch(o,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!n.ok){let e=await n.text();throw l(e),Error(e)}let c=await n.json();return a.ZP.success("Test request to ".concat(t," made - check logs/alerts on ").concat(t," to verify")),c}catch(e){throw console.error("Failed to perform health check:",e),e}},eD=async e=>{try{let t=r?"".concat(r,"/budget/list"):"/budget/list",o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Network response was not ok")}return await o.json()}catch(e){throw console.error("Failed to get callbacks:",e),e}},eZ=async(e,t,o)=>{try{let t=r?"".concat(r,"/get/config/callbacks"):"/get/config/callbacks",o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Network response was not ok")}return await o.json()}catch(e){throw console.error("Failed to get callbacks:",e),e}},eH=async e=>{try{let t=r?"".concat(r,"/config/list?config_type=general_settings"):"/config/list?config_type=general_settings",o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Network response was not ok")}return await o.json()}catch(e){throw console.error("Failed to get callbacks:",e),e}},eq=async e=>{try{let t=r?"".concat(r,"/config/pass_through_endpoint"):"/config/pass_through_endpoint",o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Network response was not ok")}return await o.json()}catch(e){throw console.error("Failed to get callbacks:",e),e}},eY=async(e,t)=>{try{let 
o=r?"".concat(r,"/config/field/info?field_name=").concat(t):"/config/field/info?field_name=".concat(t),a=await fetch(o,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!a.ok)throw await a.text(),Error("Network response was not ok");return await a.json()}catch(e){throw console.error("Failed to set callbacks:",e),e}},eX=async(e,t)=>{try{let o=r?"".concat(r,"/config/pass_through_endpoint"):"/config/pass_through_endpoint",a=await fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({...t})});if(!a.ok){let e=await a.text();throw l(e),Error("Network response was not ok")}return await a.json()}catch(e){throw console.error("Failed to set callbacks:",e),e}},e$=async(e,t,o)=>{try{let n=r?"".concat(r,"/config/field/update"):"/config/field/update",c=await fetch(n,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({field_name:t,field_value:o,config_type:"general_settings"})});if(!c.ok){let e=await c.text();throw l(e),Error("Network response was not ok")}let s=await c.json();return a.ZP.success("Successfully updated value!"),s}catch(e){throw console.error("Failed to set callbacks:",e),e}},eK=async(e,t)=>{try{let o=r?"".concat(r,"/config/field/delete"):"/config/field/delete",n=await fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({field_name:t,config_type:"general_settings"})});if(!n.ok){let e=await n.text();throw l(e),Error("Network response was not ok")}let c=await n.json();return a.ZP.success("Field reset on proxy"),c}catch(e){throw console.error("Failed to get callbacks:",e),e}},eW=async(e,t)=>{try{let o=r?"".concat(r,"/config/pass_through_endpoint?endpoint_id=").concat(t):"/config/pass_through_endpoint".concat(t),a=await fetch(o,{method:"DELETE",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!a.ok){let e=await a.text();throw l(e),Error("Network response was not ok")}return await a.json()}catch(e){throw console.error("Failed to get callbacks:",e),e}},eQ=async(e,t)=>{try{let o=r?"".concat(r,"/config/update"):"/config/update",a=await fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({...t})});if(!a.ok){let e=await a.text();throw l(e),Error("Network response was not ok")}return await a.json()}catch(e){throw console.error("Failed to set callbacks:",e),e}},e0=async e=>{try{let t=r?"".concat(r,"/health"):"/health",o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Network response was not ok")}return await o.json()}catch(e){throw console.error("Failed to call /health:",e),e}},e1=async(e,t)=>{try{let o=r?"".concat(r,"/health?model=").concat(encodeURIComponent(t)):"/health?model=".concat(encodeURIComponent(t)),a=await fetch(o,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!a.ok){let e=await a.text();throw Error(e||"Network response was not ok")}return await a.json()}catch(e){throw console.error("Failed to call /health for model ".concat(t,":"),e),e}},e2=async e=>{try{let t=r?"".concat(r,"/cache/ping"):"/cache/ping",o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error(e)}return await o.json()}catch(e){throw console.error("Failed to call /cache/ping:",e),e}},e3=async e=>{try{let 
t=r?"".concat(r,"/health/latest"):"/health/latest",o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error(e)}return await o.json()}catch(e){throw console.error("Failed to call /health/latest:",e),e}},e4=async e=>{try{console.log("Getting proxy UI settings"),console.log("proxyBaseUrl in getProxyUISettings:",r);let t=r?"".concat(r,"/sso/get/ui_settings"):"/sso/get/ui_settings",o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok)throw await o.text(),Error("Network response was not ok");return await o.json()}catch(e){throw console.error("Failed to get callbacks:",e),e}},e5=async e=>{try{let t=r?"".concat(r,"/v2/guardrails/list"):"/v2/guardrails/list",o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Network response was not ok")}return await o.json()}catch(e){throw console.error("Failed to get guardrails list:",e),e}},e6=async(e,t)=>{try{let o=r?"".concat(r,"/guardrails"):"/guardrails",a=await fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({guardrail:t})});if(!a.ok){let e=await a.text();throw l(e),Error(e)}let n=await a.json();return console.log("Create guardrail response:",n),n}catch(e){throw console.error("Failed to create guardrail:",e),e}},e8=async(e,t,o)=>{try{let a=r?"".concat(r,"/spend/logs/ui/").concat(t,"?start_date=").concat(encodeURIComponent(o)):"/spend/logs/ui/".concat(t,"?start_date=").concat(encodeURIComponent(o));console.log("Fetching log details from:",a);let n=await fetch(a,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!n.ok){let e=await n.text();throw l(e),Error("Network response was not ok")}let c=await n.json();return console.log("Fetched log details:",c),c}catch(e){throw console.error("Failed to fetch log details:",e),e}},e9=async e=>{try{let t=r?"".concat(r,"/get/internal_user_settings"):"/get/internal_user_settings";console.log("Fetching SSO settings from:",t);let o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Network response was not ok")}let a=await o.json();return console.log("Fetched SSO settings:",a),a}catch(e){throw console.error("Failed to fetch SSO settings:",e),e}},e7=async(e,t)=>{try{let o=r?"".concat(r,"/update/internal_user_settings"):"/update/internal_user_settings";console.log("Updating internal user settings:",t);let n=await fetch(o,{method:"PATCH",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify(t)});if(!n.ok){let e=await n.text();throw l(e),Error("Network response was not ok")}let c=await n.json();return console.log("Updated internal user settings:",c),a.ZP.success("Internal user settings updated successfully"),c}catch(e){throw console.error("Failed to update internal user settings:",e),e}},te=async e=>{try{let t=r?"".concat(r,"/v1/mcp/server"):"/v1/mcp/server";console.log("Fetching MCP servers from:",t);let o=await fetch(t,{method:s.GET,headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Network response was not ok")}let a=await o.json();return console.log("Fetched MCP servers:",a),a}catch(e){throw console.error("Failed to fetch MCP servers:",e),e}},tt=async(e,t)=>{try{console.log("Form Values in 
createMCPServer:",t);let o=r?"".concat(r,"/v1/mcp/server"):"/v1/mcp/server",a=await fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({...t})});if(!a.ok){let e=await a.text();throw l(e),console.error("Error response from the server:",e),Error("Network response was not ok")}let n=await a.json();return console.log("API Response:",n),n}catch(e){throw console.error("Failed to create key:",e),e}},to=async(e,t)=>{try{let o=r?"".concat(r,"/v1/mcp/server"):"/v1/mcp/server",a=await fetch(o,{method:"PUT",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify(t)});if(!a.ok){let e=await a.text();throw l(e),Error("Network response was not ok")}return await a.json()}catch(e){throw console.error("Failed to update MCP server:",e),e}},ta=async(e,t)=>{try{let o=(r?"".concat(r):"")+"/v1/mcp/server/".concat(t);console.log("in deleteMCPServer:",t);let a=await fetch(o,{method:s.DELETE,headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!a.ok){let e=await a.text();throw l(e),Error("Network response was not ok")}}catch(e){throw console.error("Failed to delete key:",e),e}},tr=async(e,t)=>{try{let o=r?"".concat(r,"/mcp-rest/tools/list?server_id=").concat(t):"/mcp-rest/tools/list?server_id=".concat(t);console.log("Fetching MCP tools from:",o);let a=await fetch(o,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!a.ok){let e=await a.text();throw l(e),Error("Network response was not ok")}let n=await a.json();return console.log("Fetched MCP tools:",n),n}catch(e){throw console.error("Failed to fetch MCP tools:",e),e}},tn=async(e,t,o,a)=>{try{let n=r?"".concat(r,"/mcp-rest/tools/call"):"/mcp-rest/tools/call";console.log("Calling MCP tool:",t,"with arguments:",o);let c=await fetch(n,{method:"POST",headers:{[d]:"Bearer ".concat(e),"x-mcp-auth":a,"Content-Type":"application/json"},body:JSON.stringify({name:t,arguments:o})});if(!c.ok){let e=await c.text();throw l(e),Error("Network response was not ok")}let s=await c.json();return console.log("MCP tool call response:",s),s}catch(e){throw console.error("Failed to call MCP tool:",e),e}},tc=async(e,t)=>{try{let o=r?"".concat(r,"/tag/new"):"/tag/new",a=await fetch(o,{method:"POST",headers:{"Content-Type":"application/json",Authorization:"Bearer ".concat(e)},body:JSON.stringify(t)});if(!a.ok){let e=await a.text();await l(e);return}return await a.json()}catch(e){throw console.error("Error creating tag:",e),e}},ts=async(e,t)=>{try{let o=r?"".concat(r,"/tag/update"):"/tag/update",a=await fetch(o,{method:"POST",headers:{"Content-Type":"application/json",Authorization:"Bearer ".concat(e)},body:JSON.stringify(t)});if(!a.ok){let e=await a.text();await l(e);return}return await a.json()}catch(e){throw console.error("Error updating tag:",e),e}},ti=async(e,t)=>{try{let o=r?"".concat(r,"/tag/info"):"/tag/info",a=await fetch(o,{method:"POST",headers:{"Content-Type":"application/json",Authorization:"Bearer ".concat(e)},body:JSON.stringify({names:t})});if(!a.ok){let e=await a.text();return await l(e),{}}return await a.json()}catch(e){throw console.error("Error getting tag info:",e),e}},tl=async e=>{try{let t=r?"".concat(r,"/tag/list"):"/tag/list",o=await fetch(t,{method:"GET",headers:{Authorization:"Bearer ".concat(e)}});if(!o.ok){let e=await o.text();return await l(e),{}}return await o.json()}catch(e){throw console.error("Error listing tags:",e),e}},td=async(e,t)=>{try{let o=r?"".concat(r,"/tag/delete"):"/tag/delete",a=await 
fetch(o,{method:"POST",headers:{"Content-Type":"application/json",Authorization:"Bearer ".concat(e)},body:JSON.stringify({name:t})});if(!a.ok){let e=await a.text();await l(e);return}return await a.json()}catch(e){throw console.error("Error deleting tag:",e),e}},th=async e=>{try{let t=r?"".concat(r,"/get/default_team_settings"):"/get/default_team_settings";console.log("Fetching default team settings from:",t);let o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Network response was not ok")}let a=await o.json();return console.log("Fetched default team settings:",a),a}catch(e){throw console.error("Failed to fetch default team settings:",e),e}},tp=async(e,t)=>{try{let o=r?"".concat(r,"/update/default_team_settings"):"/update/default_team_settings";console.log("Updating default team settings:",t);let n=await fetch(o,{method:"PATCH",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify(t)});if(!n.ok){let e=await n.text();throw l(e),Error("Network response was not ok")}let c=await n.json();return console.log("Updated default team settings:",c),a.ZP.success("Default team settings updated successfully"),c}catch(e){throw console.error("Failed to update default team settings:",e),e}},tw=async(e,t)=>{try{let o=r?"".concat(r,"/team/permissions_list?team_id=").concat(t):"/team/permissions_list?team_id=".concat(t),a=await fetch(o,{method:"GET",headers:{"Content-Type":"application/json",Authorization:"Bearer ".concat(e)}});if(!a.ok){let e=await a.text();throw l(e),Error("Network response was not ok")}let n=await a.json();return console.log("Team permissions response:",n),n}catch(e){throw console.error("Failed to get team permissions:",e),e}},tu=async(e,t,o)=>{try{let a=r?"".concat(r,"/team/permissions_update"):"/team/permissions_update",n=await fetch(a,{method:"POST",headers:{"Content-Type":"application/json",Authorization:"Bearer ".concat(e)},body:JSON.stringify({team_id:t,team_member_permissions:o})});if(!n.ok){let e=await n.text();throw l(e),Error("Network response was not ok")}let c=await n.json();return console.log("Team permissions response:",c),c}catch(e){throw console.error("Failed to update team permissions:",e),e}},tg=async(e,t)=>{try{let o=r?"".concat(r,"/spend/logs/session/ui?session_id=").concat(encodeURIComponent(t)):"/spend/logs/session/ui?session_id=".concat(encodeURIComponent(t)),a=await fetch(o,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!a.ok){let e=await a.text();throw l(e),Error("Network response was not ok")}return await a.json()}catch(e){throw console.error("Failed to fetch session logs:",e),e}},tf=async(e,t)=>{try{let o=r?"".concat(r,"/vector_store/new"):"/vector_store/new",a=await fetch(o,{method:"POST",headers:{"Content-Type":"application/json",Authorization:"Bearer ".concat(e)},body:JSON.stringify(t)});if(!a.ok){let e=await a.json();throw Error(e.detail||"Failed to create vector store")}return await a.json()}catch(e){throw console.error("Error creating vector store:",e),e}},ty=async function(e){arguments.length>1&&void 0!==arguments[1]&&arguments[1],arguments.length>2&&void 0!==arguments[2]&&arguments[2];try{let t=r?"".concat(r,"/vector_store/list"):"/vector_store/list",o=await fetch(t,{method:"GET",headers:{"Content-Type":"application/json",Authorization:"Bearer ".concat(e)}});if(!o.ok){let e=await o.json();throw Error(e.detail||"Failed to list vector stores")}return await o.json()}catch(e){throw 
console.error("Error listing vector stores:",e),e}},tm=async(e,t)=>{try{let o=r?"".concat(r,"/vector_store/delete"):"/vector_store/delete",a=await fetch(o,{method:"POST",headers:{"Content-Type":"application/json",Authorization:"Bearer ".concat(e)},body:JSON.stringify({vector_store_id:t})});if(!a.ok){let e=await a.json();throw Error(e.detail||"Failed to delete vector store")}return await a.json()}catch(e){throw console.error("Error deleting vector store:",e),e}},tk=async e=>{try{let t=r?"".concat(r,"/email/event_settings"):"/email/event_settings",o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Failed to get email event settings")}let a=await o.json();return console.log("Email event settings response:",a),a}catch(e){throw console.error("Failed to get email event settings:",e),e}},t_=async(e,t)=>{try{let o=r?"".concat(r,"/email/event_settings"):"/email/event_settings",a=await fetch(o,{method:"PATCH",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify(t)});if(!a.ok){let e=await a.text();throw l(e),Error("Failed to update email event settings")}let n=await a.json();return console.log("Update email event settings response:",n),n}catch(e){throw console.error("Failed to update email event settings:",e),e}},tT=async e=>{try{let t=r?"".concat(r,"/email/event_settings/reset"):"/email/event_settings/reset",o=await fetch(t,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Failed to reset email event settings")}let a=await o.json();return console.log("Reset email event settings response:",a),a}catch(e){throw console.error("Failed to reset email event settings:",e),e}},tj=async(e,t)=>{try{let o=r?"".concat(r,"/guardrails/").concat(t):"/guardrails/".concat(t),a=await fetch(o,{method:"DELETE",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!a.ok){let e=await a.text();throw l(e),Error(e)}let n=await a.json();return console.log("Delete guardrail response:",n),n}catch(e){throw console.error("Failed to delete guardrail:",e),e}},tE=async e=>{try{let t=r?"".concat(r,"/guardrails/ui/add_guardrail_settings"):"/guardrails/ui/add_guardrail_settings",o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Failed to get guardrail UI settings")}let a=await o.json();return console.log("Guardrail UI settings response:",a),a}catch(e){throw console.error("Failed to get guardrail UI settings:",e),e}},tC=async e=>{try{let t=r?"".concat(r,"/guardrails/ui/provider_specific_params"):"/guardrails/ui/provider_specific_params",o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Failed to get guardrail provider specific parameters")}let a=await o.json();return console.log("Guardrail provider specific params response:",a),a}catch(e){throw console.error("Failed to get guardrail provider specific parameters:",e),e}},tS=async(e,t)=>{try{let o=r?"".concat(r,"/guardrails/").concat(t,"/info"):"/guardrails/".concat(t,"/info"),a=await fetch(o,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!a.ok){let e=await a.text();throw l(e),Error("Failed to get guardrail info")}let n=await a.json();return console.log("Guardrail info response:",n),n}catch(e){throw console.error("Failed to 
get guardrail info:",e),e}},tN=async(e,t,o)=>{try{let a=r?"".concat(r,"/guardrails/").concat(t):"/guardrails/".concat(t),n=await fetch(a,{method:"PATCH",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify(o)});if(!n.ok){let e=await n.text();throw l(e),Error("Failed to update guardrail")}let c=await n.json();return console.log("Update guardrail response:",c),c}catch(e){throw console.error("Failed to update guardrail:",e),e}},tv=async e=>{try{let t=r?"".concat(r,"/get/sso_settings"):"/get/sso_settings";console.log("Fetching SSO configuration from:",t);let o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!o.ok){let e=await o.text();throw l(e),Error("Network response was not ok")}let a=await o.json();return console.log("Fetched SSO configuration:",a),a}catch(e){throw console.error("Failed to fetch SSO configuration:",e),e}},tF=async(e,t)=>{try{let o=r?"".concat(r,"/update/sso_settings"):"/update/sso_settings";console.log("Updating SSO configuration:",t);let a=await fetch(o,{method:"PATCH",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify(t)});if(!a.ok){let e=await a.text();throw l(e),Error("Network response was not ok")}let n=await a.json();return console.log("Updated SSO configuration:",n),n}catch(e){throw console.error("Failed to update SSO configuration:",e),e}},tb=async(e,t,o,a,n)=>{try{let t=r?"".concat(r,"/audit"):"/audit",o=new URLSearchParams;a&&o.append("page",a.toString()),n&&o.append("page_size",n.toString());let c=o.toString();c&&(t+="?".concat(c));let s=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"}});if(!s.ok){let e=await s.text();throw l(e),Error("Network response was not ok")}return await s.json()}catch(e){throw console.error("Failed to fetch audit logs:",e),e}},tx=async e=>{try{let t=r?"".concat(r,"/user/available_users"):"/user/available_users",o=await fetch(t,{method:"GET",headers:{[d]:"Bearer ".concat(e)}});if(!o.ok){if(404===o.status)return null;let e=await o.text();throw l(e),Error("Network response was not ok")}return await o.json()}catch(e){throw console.error("Failed to fetch remaining users:",e),e}},tB=async(e,t,o)=>{try{let n=r?"".concat(r,"/config/pass_through_endpoint/").concat(encodeURIComponent(t)):"/config/pass_through_endpoint/".concat(encodeURIComponent(t)),c=await fetch(n,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify(o)});if(!c.ok){let e=await c.text();throw l(e),Error("Network response was not ok")}let s=await c.json();return a.ZP.success("Pass through endpoint updated successfully"),s}catch(e){throw console.error("Failed to update pass through endpoint:",e),e}},tO=async(e,t)=>{try{let o=r?"".concat(r,"/config/callback/delete"):"/config/callback/delete",a=await fetch(o,{method:"POST",headers:{[d]:"Bearer ".concat(e),"Content-Type":"application/json"},body:JSON.stringify({callback_name:t})});if(!a.ok){let e=await a.text();throw l(e),Error("Network response was not ok")}return await a.json()}catch(e){throw console.error("Failed to delete specific callback:",e),e}}}}]);
\ No newline at end of file
diff --git a/litellm/proxy/_experimental/out/_next/static/chunks/webpack-a426aae3231a8df1.js b/litellm/proxy/_experimental/out/_next/static/chunks/webpack-a426aae3231a8df1.js
index c82df116cd8d..217029081366 100644
--- a/litellm/proxy/_experimental/out/_next/static/chunks/webpack-a426aae3231a8df1.js
+++ b/litellm/proxy/_experimental/out/_next/static/chunks/webpack-a426aae3231a8df1.js
@@ -1 +1 @@
-!function(){"use strict";var e,t,n,r,o,u,i,c,f,a={},l={};function d(e){var t=l[e];if(void 0!==t)return t.exports;var n=l[e]={id:e,loaded:!1,exports:{}},r=!0;try{a[e].call(n.exports,n,n.exports,d),r=!1}finally{r&&delete l[e]}return n.loaded=!0,n.exports}d.m=a,e=[],d.O=function(t,n,r,o){if(n){o=o||0;for(var u=e.length;u>0&&e[u-1][2]>o;u--)e[u]=e[u-1];e[u]=[n,r,o];return}for(var i=1/0,u=0;u=o&&Object.keys(d.O).every(function(e){return d.O[e](n[f])})?n.splice(f--,1):(c=!1,o0&&e[u-1][2]>o;u--)e[u]=e[u-1];e[u]=[n,r,o];return}for(var i=1/0,u=0;u=o&&Object.keys(d.O).every(function(e){return d.O[e](n[f])})?n.splice(f--,1):(c=!1,o
diff --git a/litellm/proxy/_experimental/out/assets/logos/cerebras.svg b/litellm/proxy/_experimental/out/assets/logos/cerebras.svg
index 426f6430c230..1ff347220c5a 100644
--- a/litellm/proxy/_experimental/out/assets/logos/cerebras.svg
+++ b/litellm/proxy/_experimental/out/assets/logos/cerebras.svg
@@ -1,89 +1,89 @@
diff --git a/litellm/proxy/_experimental/out/assets/logos/deepseek.svg b/litellm/proxy/_experimental/out/assets/logos/deepseek.svg
index c4754047da2b..61760f13190e 100644
--- a/litellm/proxy/_experimental/out/assets/logos/deepseek.svg
+++ b/litellm/proxy/_experimental/out/assets/logos/deepseek.svg
@@ -1,25 +1,25 @@
diff --git a/litellm/proxy/_experimental/out/assets/logos/perplexity-ai.svg b/litellm/proxy/_experimental/out/assets/logos/perplexity-ai.svg
index e828b6dfbf13..e3a32be98098 100644
--- a/litellm/proxy/_experimental/out/assets/logos/perplexity-ai.svg
+++ b/litellm/proxy/_experimental/out/assets/logos/perplexity-ai.svg
@@ -1,16 +1,16 @@