Skip to content

Commit 75a4321

Browse files
committed
add collective_rpc to llm engine
Signed-off-by: Yinghai Lu <[email protected]>
1 parent 5536b30 commit 75a4321

File tree

2 files changed

+12
-0
lines changed

2 files changed

+12
-0
lines changed

vllm/engine/async_llm_engine.py

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1238,6 +1238,12 @@ async def is_sleeping(self) -> bool:
12381238
async def add_lora(self, lora_request: LoRARequest) -> None:
    """Register the given LoRA adapter with the wrapped engine.

    The call is forwarded synchronously despite the ``async`` signature,
    matching the engine-client interface this class implements.
    """
    engine = self.engine
    engine.add_lora(lora_request)
12401240

1241+
async def collective_rpc(self, method: str, timeout: float | None = None, args: tuple = (), kwargs: dict | None = None):
1242+
"""
1243+
Perform a collective RPC call to the given path.
1244+
"""
1245+
return await self.engine_core.collective_rpc_async(method, timeout, args, kwargs)
1246+
12411247

12421248
# TODO(v1): Remove this class proxy when V1 goes default.
12431249
if envs.is_set("VLLM_USE_V1") and envs.VLLM_USE_V1:

vllm/v1/engine/async_llm.py

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -490,6 +490,12 @@ async def pin_lora(self, lora_id: int) -> bool:
490490
"""Prevent an adapter from being evicted."""
491491
return await self.engine_core.pin_lora_async(lora_id)
492492

493+
async def collective_rpc(self, method: str, timeout: float | None = None, args: tuple = (), kwargs: dict | None = None):
494+
"""
495+
Perform a collective RPC call to the given path.
496+
"""
497+
return await self.engine_core.collective_rpc_async(method, timeout, args, kwargs)
498+
493499
@property
494500
def is_running(self) -> bool:
495501
# Is None before the loop is started.

0 commit comments

Comments
 (0)