16 changes: 0 additions & 16 deletions libs/core/langchain_core/language_models/chat_models.py
@@ -556,7 +556,6 @@ def stream(
params = self._get_invocation_params(stop=stop, **kwargs)
options = {"stop": stop, **kwargs, **ls_structured_output_format_dict}
inheritable_metadata = {
**self._get_metadata_invocation_params(params),
**(config.get("metadata") or {}),
**self._get_ls_params_with_defaults(stop=stop, **kwargs),
}
@@ -685,7 +684,6 @@ async def astream(
params = self._get_invocation_params(stop=stop, **kwargs)
options = {"stop": stop, **kwargs, **ls_structured_output_format_dict}
inheritable_metadata = {
**self._get_metadata_invocation_params(params),
**(config.get("metadata") or {}),
**self._get_ls_params_with_defaults(stop=stop, **kwargs),
}
@@ -839,18 +837,6 @@ def _get_invocation_params(
params["stop"] = stop
return {**params, **kwargs}

def _get_metadata_invocation_params(
self,
invocation_params: dict[str, Any],
) -> dict[str, Any]:
"""Filter invocation params for inclusion in run metadata."""
secret_keys = set(self.lc_secrets.keys())
return {
k: v
for k, v in invocation_params.items()
if v is not None and k not in secret_keys and k != "tools"
}

def _get_ls_params(
self,
stop: list[str] | None = None,
@@ -974,7 +960,6 @@ def generate(
params = self._get_invocation_params(stop=stop, **kwargs)
options = {"stop": stop, **ls_structured_output_format_dict}
inheritable_metadata = {
**self._get_metadata_invocation_params(params),
**(metadata or {}),
**self._get_ls_params_with_defaults(stop=stop, **kwargs),
}
@@ -1098,7 +1083,6 @@ async def agenerate(
params = self._get_invocation_params(stop=stop, **kwargs)
options = {"stop": stop, **ls_structured_output_format_dict}
inheritable_metadata = {
**self._get_metadata_invocation_params(params),
**(metadata or {}),
**self._get_ls_params_with_defaults(stop=stop, **kwargs),
}
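For reference, the pattern removed above — filtering invocation params before merging them into run metadata — amounts to the following. This is a minimal, self-contained sketch rather than the library code: invocation_params, lc_secrets, and user_metadata are hypothetical stand-ins for what _get_invocation_params(), the model's lc_secrets property, and config["metadata"] would supply, and the trailing _get_ls_params_with_defaults() merge is omitted.

from typing import Any

# Stand-in values; in the diff these come from _get_invocation_params(),
# the model's lc_secrets property, and config["metadata"] respectively.
invocation_params: dict[str, Any] = {
    "model": "test-model",
    "thinking": {"type": "enabled", "budget_tokens": 10000},
    "api_key": "secret-key",  # secret: must not reach run metadata
    "tools": [],              # explicitly excluded by the helper
    "temperature": None,      # None values are dropped
}
lc_secrets = {"api_key": "API_KEY"}
user_metadata = {"model": "user-override"}

# Mirror of the removed _get_metadata_invocation_params helper:
# keep only non-None, non-secret params, and never the "tools" payload.
secret_keys = set(lc_secrets)
filtered = {
    k: v
    for k, v in invocation_params.items()
    if v is not None and k not in secret_keys and k != "tools"
}

# Mirror of the inheritable_metadata merge in stream()/generate():
# user-provided metadata is spread after the filtered params, so it wins on key clashes.
inheritable_metadata = {**filtered, **user_metadata}

assert "api_key" not in inheritable_metadata
assert inheritable_metadata["model"] == "user-override"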
147 changes: 0 additions & 147 deletions libs/core/tests/unit_tests/language_models/chat_models/test_base.py
@@ -1390,150 +1390,3 @@ def test_generate_response_from_error_handles_streaming_response_failure() -> None:
assert metadata["body"] is None
assert metadata["headers"] == {"content-type": "application/json"}
assert metadata["status_code"] == 400


class FakeChatModelWithSecrets(BaseChatModel):
"""Fake chat model with lc_secrets for testing metadata filtering."""

api_key: str = "secret-key"
model: str = "test-model"
thinking: dict[str, Any] | None = None

@property
def lc_secrets(self) -> dict[str, str]:
return {"api_key": "API_KEY"}

@property
def _identifying_params(self) -> dict[str, Any]:
return {
"model": self.model,
"thinking": self.thinking,
}

@property
def _llm_type(self) -> str:
return "fake-chat-model-with-secrets"

def _generate(
self,
messages: list[BaseMessage], # noqa: ARG002
stop: list[str] | None = None, # noqa: ARG002
run_manager: CallbackManagerForLLMRun | None = None, # noqa: ARG002
**kwargs: Any, # noqa: ARG002
) -> ChatResult:
return ChatResult(
generations=[ChatGeneration(message=AIMessage(content="test response"))]
)


def test_init_params_in_metadata() -> None:
"""Test that init params are included in run metadata."""
llm = FakeChatModelWithSecrets(thinking={"type": "enabled", "budget_tokens": 10000})
with collect_runs() as cb:
llm.invoke("hi")
assert len(cb.traced_runs) == 1
run = cb.traced_runs[0]
assert run.extra is not None
metadata = run.extra.get("metadata", {})
assert "model" in metadata
assert metadata["model"] == "test-model"
assert "thinking" in metadata
assert metadata["thinking"] == {"type": "enabled", "budget_tokens": 10000}


def test_init_params_filter_none_values() -> None:
"""Test that None values are filtered from init params in metadata."""
llm = FakeChatModelWithSecrets(thinking=None)
with collect_runs() as cb:
llm.invoke("hi")
assert len(cb.traced_runs) == 1
run = cb.traced_runs[0]
assert run.extra is not None
metadata = run.extra.get("metadata", {})
assert "thinking" not in metadata


def test_init_params_filter_secrets() -> None:
"""Test that lc_secrets keys are filtered from init params in metadata."""
llm = FakeChatModelWithSecrets()
with collect_runs() as cb:
llm.invoke("hi")
assert len(cb.traced_runs) == 1
run = cb.traced_runs[0]
assert run.extra is not None
metadata = run.extra.get("metadata", {})
assert "api_key" not in metadata


def test_runtime_params_in_metadata() -> None:
"""Test that runtime invocation params (kwargs) are included in metadata."""
llm = FakeChatModelWithSecrets()
with collect_runs() as cb:
llm.invoke("hi", effort="low")
assert len(cb.traced_runs) == 1
run = cb.traced_runs[0]
assert run.extra is not None
metadata = run.extra.get("metadata", {})
assert "effort" in metadata
assert metadata["effort"] == "low"


def test_runtime_secrets_filtered_from_metadata() -> None:
"""Test that runtime secret params (kwargs) are filtered from metadata."""
llm = FakeChatModelWithSecrets()
with collect_runs() as cb:
llm.invoke("hi", api_key="runtime-secret")
assert len(cb.traced_runs) == 1
run = cb.traced_runs[0]
assert run.extra is not None
metadata = run.extra.get("metadata", {})
assert "api_key" not in metadata


def test_user_metadata_takes_precedence() -> None:
"""Test that user-provided metadata takes precedence over invocation params."""
llm = FakeChatModelWithSecrets(model="init-model")
with collect_runs() as cb:
llm.invoke("hi", config={"metadata": {"model": "user-override"}})
assert len(cb.traced_runs) == 1
run = cb.traced_runs[0]
assert run.extra is not None
metadata = run.extra.get("metadata", {})
assert metadata["model"] == "user-override"


async def test_invocation_params_in_metadata_ainvoke() -> None:
"""Test that invocation params are included in run metadata for ainvoke."""
llm = FakeChatModelWithSecrets(thinking={"type": "enabled"})
with collect_runs() as cb:
await llm.ainvoke("hi")
assert len(cb.traced_runs) == 1
run = cb.traced_runs[0]
assert run.extra is not None
metadata = run.extra.get("metadata", {})
assert "thinking" in metadata


def test_invocation_params_in_metadata_stream() -> None:
"""Test that invocation params are included in run metadata for stream."""
llm = FakeChatModelWithSecrets(thinking={"type": "enabled"})
with collect_runs() as cb:
list(llm.stream("hi"))
assert len(cb.traced_runs) == 1
run = cb.traced_runs[0]
assert run.extra is not None
metadata = run.extra.get("metadata", {})
assert "thinking" in metadata


async def test_invocation_params_in_metadata_astream() -> None:
"""Test that invocation params are included in run metadata for astream."""
llm = FakeChatModelWithSecrets(thinking={"type": "enabled"})
with collect_runs() as cb:
async for _ in llm.astream("hi"):
pass
assert len(cb.traced_runs) == 1
run = cb.traced_runs[0]
assert run.extra is not None
metadata = run.extra.get("metadata", {})
assert "thinking" in metadata