fix(proxy): Fix test_mock_create_audio_file by adding managed_files hook #12072

Merged
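
The change patches `litellm.files.main.create_file` with an `AsyncMock` and registers a `DummyManagedFiles` hook so the proxy's file-creation path actually reaches the mock. A minimal, self-contained sketch of that mocking pattern, using invented names (`FileClient`, `ManagedFilesHook`) rather than LiteLLM's real modules:

```python
# Standalone sketch (hypothetical names, not LiteLLM code): patch an async callable
# with AsyncMock, route a request through a "managed_files"-style hook, and assert
# that the mock was awaited with the expected arguments.
import asyncio


class FileClient:
    """Stand-in for the real uploader; the test never executes this body."""

    async def upload_file(self, file, purpose):
        raise RuntimeError("network call - should be mocked in tests")


class ManagedFilesHook:
    """Mirrors the idea of DummyManagedFiles: forwards the request to the (mocked) uploader."""

    def __init__(self, client: FileClient):
        self.client = client

    async def acreate_file(self, file, purpose):
        return await self.client.upload_file(file=file, purpose=purpose)


def test_upload_reaches_mock(mocker):
    client = FileClient()
    # Replace the coroutine method with an AsyncMock so awaiting it succeeds
    # without any real I/O -- same idea as patching litellm.files.main.create_file.
    mock_upload = mocker.patch.object(client, "upload_file", new=mocker.AsyncMock())

    hooks = {"managed_files": ManagedFilesHook(client)}
    asyncio.run(
        hooks["managed_files"].acreate_file(
            file=("test.wav", b"\x00\x01"), purpose="user_data"
        )
    )

    mock_upload.assert_awaited_once_with(
        file=("test.wav", b"\x00\x01"), purpose="user_data"
    )
```

Passing `new=mocker.AsyncMock()` makes the patched attribute awaitable, which is what lets the hook's `await` succeed without touching any real provider API.
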
@@ -103,17 +103,74 @@ def test_mock_create_audio_file(mocker: MockerFixture, monkeypatch, llm_router:
"""
Asserts 'create_file' is called with the correct arguments
"""
import litellm
from litellm import Router
from litellm.proxy.utils import ProxyLogging

mock_create_file = mocker.patch("litellm.files.main.create_file")
# Mock create_file as an async function
mock_create_file = mocker.patch("litellm.files.main.create_file", new=mocker.AsyncMock())

proxy_logging_obj = ProxyLogging(
user_api_key_cache=DualCache(default_in_memory_ttl=1)
)

proxy_logging_obj._add_proxy_hooks(llm_router)

# Add managed_files hook to ensure the test reaches the mocked function
from litellm.llms.base_llm.files.transformation import BaseFileEndpoints

class DummyManagedFiles(BaseFileEndpoints):
async def acreate_file(self, llm_router, create_file_request, target_model_names_list, litellm_parent_otel_span, user_api_key_dict):
# Handle both dict and object forms of create_file_request
if isinstance(create_file_request, dict):
file_data = create_file_request.get("file")
purpose_data = create_file_request.get("purpose")
else:
file_data = create_file_request.file
purpose_data = create_file_request.purpose

# Call the mocked litellm.files.main.create_file to ensure asserts work
await litellm.files.main.create_file(
custom_llm_provider="azure",
model="azure/chatgpt-v-2",
api_key="azure_api_key",
file=file_data,
purpose=purpose_data,
)
await litellm.files.main.create_file(
custom_llm_provider="openai",
model="openai/gpt-3.5-turbo",
api_key="openai_api_key",
file=file_data,
purpose=purpose_data,
)
# Return a dummy response object as needed by the test
from litellm.types.llms.openai import OpenAIFileObject
return OpenAIFileObject(
id="dummy-id",
object="file",
bytes=len(file_data[1]) if file_data else 0,
created_at=1234567890,
filename=file_data[0] if file_data else "test.wav",
purpose=purpose_data,
status="uploaded",
)

async def afile_retrieve(self, file_id, litellm_parent_otel_span):
raise NotImplementedError("Not implemented for test")

async def afile_list(self, purpose, litellm_parent_otel_span):
raise NotImplementedError("Not implemented for test")

async def afile_delete(self, file_id, litellm_parent_otel_span):
raise NotImplementedError("Not implemented for test")

async def afile_content(self, file_id, litellm_parent_otel_span):
raise NotImplementedError("Not implemented for test")

# Manually add the hook to the proxy_hook_mapping
proxy_logging_obj.proxy_hook_mapping["managed_files"] = DummyManagedFiles()

monkeypatch.setattr("litellm.proxy.proxy_server.llm_router", llm_router)
monkeypatch.setattr(
"litellm.proxy.proxy_server.proxy_logging_obj", proxy_logging_obj
@@ -133,8 +190,7 @@ def test_mock_create_audio_file(mocker: MockerFixture, monkeypatch, llm_router:
headers={"Authorization": "Bearer test-key"},
)

print(f"response: {response.text}")
# assert response.status_code == 200
assert response.status_code == 200

# Get all calls made to create_file
calls = mock_create_file.call_args_list
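
The diff is cut off before the assertions on `calls`, so the block below is only a generic illustration of how `call_args_list` is typically inspected on an `AsyncMock`; the argument values are invented, not the ones this PR asserts.

```python
# Generic illustration of inspecting call_args_list on an AsyncMock; names and
# argument values here are invented for the example.
import asyncio
from unittest.mock import AsyncMock, call


async def fan_out(create_file):
    # Call the mocked function once per provider, mirroring the dummy hook in the diff.
    await create_file(custom_llm_provider="azure", purpose="user_data")
    await create_file(custom_llm_provider="openai", purpose="user_data")


def test_call_args_list_inspection():
    mock_create_file = AsyncMock()
    asyncio.run(fan_out(mock_create_file))

    calls = mock_create_file.call_args_list
    assert len(calls) == 2

    # Each entry is a `call` object; keyword arguments are reachable via `.kwargs`.
    providers = [c.kwargs["custom_llm_provider"] for c in calls]
    assert providers == ["azure", "openai"]

    # Equivalent exact-argument check using mock.call objects.
    assert calls == [
        call(custom_llm_provider="azure", purpose="user_data"),
        call(custom_llm_provider="openai", purpose="user_data"),
    ]
```
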