From 7a3a704a5d5ead92430a23d779122ad0f29a8237 Mon Sep 17 00:00:00 2001
From: Ishaan Jaff
Date: Mon, 23 Jun 2025 10:05:38 -0700
Subject: [PATCH 1/3] TestPerplexityWebSearch

---
 .../llms/perplexity/test_perplexity.py        | 27 +++++++++++++++++++
 1 file changed, 27 insertions(+)
 create mode 100644 tests/test_litellm/llms/perplexity/test_perplexity.py

diff --git a/tests/test_litellm/llms/perplexity/test_perplexity.py b/tests/test_litellm/llms/perplexity/test_perplexity.py
new file mode 100644
index 000000000000..ce1e40d20f1e
--- /dev/null
+++ b/tests/test_litellm/llms/perplexity/test_perplexity.py
@@ -0,0 +1,27 @@
+import os
+import sys
+
+from pydantic import BaseModel
+
+sys.path.insert(0, os.path.abspath("../../.."))
+
+import pytest
+
+
+class TestPerplexityWebSearch:
+    """Test suite for Perplexity web search functionality."""
+
+    @pytest.mark.parametrize(
+        "model",
+        ["perplexity/sonar", "perplexity/sonar-pro"]
+    )
+    def test_web_search_options_in_supported_params(self, model):
+        """
+        Test that web_search_options is in the list of supported parameters for Perplexity sonar models
+        """
+        from litellm.llms.perplexity.chat.transformation import PerplexityChatConfig
+
+        config = PerplexityChatConfig()
+        supported_params = config.get_supported_openai_params(model=model)
+
+        assert "web_search_options" in supported_params, f"web_search_options should be supported for {model}"

From 7ec0ab50eee939c9160644dfb91bd2346f15d4f0 Mon Sep 17 00:00:00 2001
From: Ishaan Jaff
Date: Mon, 23 Jun 2025 10:06:21 -0700
Subject: [PATCH 2/3] use supports_web_search

---
 litellm/llms/perplexity/chat/transformation.py | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/litellm/llms/perplexity/chat/transformation.py b/litellm/llms/perplexity/chat/transformation.py
index 4ce2df51b6e4..a81400870a79 100644
--- a/litellm/llms/perplexity/chat/transformation.py
+++ b/litellm/llms/perplexity/chat/transformation.py
@@ -55,4 +55,13 @@ def get_supported_openai_params(self, model: str) -> list:
                 base_openai_params.append("reasoning_effort")
         except Exception as e:
             verbose_logger.debug(f"Error checking if model supports reasoning: {e}")
+
+        try:
+            if litellm.supports_web_search(
+                model=model, custom_llm_provider=self.custom_llm_provider
+            ):
+                base_openai_params.append("web_search_options")
+        except Exception as e:
+            verbose_logger.debug(f"Error checking if model supports web search: {e}")
+
         return base_openai_params

From 26062e26bcf31e1d81cb305a9f927786d894e93e Mon Sep 17 00:00:00 2001
From: Ishaan Jaff
Date: Mon, 23 Jun 2025 10:10:13 -0700
Subject: [PATCH 3/3] Update tests/test_litellm/llms/perplexity/test_perplexity.py

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
---
 tests/test_litellm/llms/perplexity/test_perplexity.py | 2 --
 1 file changed, 2 deletions(-)

diff --git a/tests/test_litellm/llms/perplexity/test_perplexity.py b/tests/test_litellm/llms/perplexity/test_perplexity.py
index ce1e40d20f1e..5c8eead4d6da 100644
--- a/tests/test_litellm/llms/perplexity/test_perplexity.py
+++ b/tests/test_litellm/llms/perplexity/test_perplexity.py
@@ -1,8 +1,6 @@
 import os
 import sys
 
-from pydantic import BaseModel
-
 sys.path.insert(0, os.path.abspath("../../.."))
 
 import pytest
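
Usage sketch (not part of the patches above): once the transformation change reports
"web_search_options" as a supported OpenAI param for Perplexity sonar models, a call
like the one below should forward the option to the provider instead of dropping it.
The "search_context_size" key follows Perplexity's documented option shape and is an
assumption for illustration, not something these patches add.

    import litellm

    # With web_search_options now in the supported-params list for perplexity/sonar,
    # litellm passes it through as a provider parameter on the chat completion call.
    response = litellm.completion(
        model="perplexity/sonar",
        messages=[{"role": "user", "content": "Summarize this week's AI news."}],
        web_search_options={"search_context_size": "medium"},  # assumed option shape
    )
    print(response.choices[0].message.content)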