fix: use aclose instead of close for AsyncExitStack
gjpower authored Oct 15, 2024
1 parent 13d0c0c commit a69634b
Showing 1 changed file with 6 additions and 5 deletions.
11 changes: 6 additions & 5 deletions llama_cpp/server/app.py
@@ -326,7 +326,7 @@ async def create_completion(
         def iterator() -> Iterator[llama_cpp.CreateCompletionStreamResponse]:
             yield first_response
             yield from iterator_or_completion
-            exit_stack.close()
+            exit_stack.aclose()
 
         send_chan, recv_chan = anyio.create_memory_object_stream(10)
         return EventSourceResponse(
@@ -336,12 +336,13 @@ def iterator() -> Iterator[llama_cpp.CreateCompletionStreamResponse]:
                 request=request,
                 inner_send_chan=send_chan,
                 iterator=iterator(),
-                on_complete=exit_stack.close,
+                on_complete=exit_stack.aclose,
             ),
             sep="\n",
             ping_message_factory=_ping_message_factory,
         )
     else:
+        await exit_stack.aclose()
         return iterator_or_completion
 
 
@@ -517,7 +518,7 @@ async def create_chat_completion(
         def iterator() -> Iterator[llama_cpp.ChatCompletionChunk]:
             yield first_response
             yield from iterator_or_completion
-            exit_stack.close()
+            exit_stack.aclose()
 
         send_chan, recv_chan = anyio.create_memory_object_stream(10)
         return EventSourceResponse(
@@ -527,13 +528,13 @@ def iterator() -> Iterator[llama_cpp.ChatCompletionChunk]:
                 request=request,
                 inner_send_chan=send_chan,
                 iterator=iterator(),
-                on_complete=exit_stack.close,
+                on_complete=exit_stack.aclose,
             ),
             sep="\n",
             ping_message_factory=_ping_message_factory,
         )
     else:
-        exit_stack.close()
+        await exit_stack.aclose()
         return iterator_or_completion
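A note for context (not part of the commit): contextlib.AsyncExitStack does not have the synchronous close() method that contextlib.ExitStack provides, so calling close() on it raises AttributeError, and its aclose() only runs the registered cleanups once it is awaited. A minimal sketch illustrating both points, using only the standard library:

import asyncio
import contextlib

async def main() -> None:
    stack = contextlib.AsyncExitStack()

    # Register an async cleanup callback on the stack.
    async def cleanup() -> None:
        print("resources released")

    stack.push_async_callback(cleanup)

    # AsyncExitStack has no synchronous close(); that method
    # exists only on contextlib.ExitStack.
    assert not hasattr(stack, "close")

    # Calling aclose() without awaiting it merely creates a
    # coroutine object; cleanups run only when it is awaited.
    await stack.aclose()  # prints "resources released"

asyncio.run(main())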
