Skip to content

Commit

Permalink
Fix an error in the type annotation
Browse files — browse the repository at this point in the history
  • Loading branch information
DarkLight1337 committed Sep 24, 2024
1 parent 4f62ceb commit 3d09d0f
Showing 1 changed file with 8 additions and 6 deletions.
14 changes: 8 additions & 6 deletions vllm/engine/async_llm_engine.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,8 @@
import time
import weakref
from functools import partial
from typing import (Any, AsyncGenerator, Callable, Dict, Iterable, List,
Mapping, Optional, Set, Tuple, Type, Union, overload)
from typing import (Any, AsyncGenerator, Callable, Coroutine, Dict, Iterable,
List, Mapping, Optional, Set, Tuple, Type, Union, overload)
from weakref import ReferenceType

import vllm.envs as envs
Expand Down Expand Up @@ -812,7 +812,7 @@ async def run_engine_loop(engine_ref: ReferenceType):
# This method does not need to be async, but kept that way
# for backwards compatibility.
@overload # DEPRECATED
async def add_request(
def add_request(
self,
request_id: str,
*,
Expand All @@ -822,11 +822,12 @@ async def add_request(
lora_request: Optional[LoRARequest] = None,
trace_headers: Optional[Mapping[str, str]] = None,
prompt_adapter_request: Optional[PromptAdapterRequest] = None,
) -> AsyncGenerator[Union[RequestOutput, EmbeddingRequestOutput], None]:
) -> Coroutine[None, None, AsyncGenerator[Union[
RequestOutput, EmbeddingRequestOutput], None]]:
...

@overload
async def add_request(
def add_request(
self,
request_id: str,
prompt: PromptType,
Expand All @@ -835,7 +836,8 @@ async def add_request(
lora_request: Optional[LoRARequest] = None,
trace_headers: Optional[Mapping[str, str]] = None,
prompt_adapter_request: Optional[PromptAdapterRequest] = None,
) -> AsyncGenerator[Union[RequestOutput, EmbeddingRequestOutput], None]:
) -> Coroutine[None, None, AsyncGenerator[Union[
RequestOutput, EmbeddingRequestOutput], None]]:
...

@deprecate_kwargs(
Expand Down

0 comments on commit 3d09d0f

Please sign in to comment.