Skip to content

Commit afedfc8

Browse files
authored
fix: add missing await statements for async exit_stack handling (abetlen#1858)
1 parent ea4d86a commit afedfc8

File tree

1 file changed

+3
-3
lines changed

1 file changed

+3
-3
lines changed

llama_cpp/server/app.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -318,7 +318,7 @@ async def create_completion(
318318
Iterator[llama_cpp.CreateCompletionStreamResponse],
319319
] = await run_in_threadpool(llama, **kwargs)
320320
except Exception as err:
321-
exit_stack.close()
321+
await exit_stack.aclose()
322322
raise err
323323

324324
if isinstance(iterator_or_completion, Iterator):
@@ -475,7 +475,7 @@ async def create_chat_completion(
475475
# is complete.
476476
# https://github.com/tiangolo/fastapi/issues/11143
477477
exit_stack = contextlib.AsyncExitStack()
478-
llama_proxy = exit_stack.enter_async_context(contextlib.asynccontextmanager(get_llama_proxy)())
478+
llama_proxy = await exit_stack.enter_async_context(contextlib.asynccontextmanager(get_llama_proxy)())
479479
if llama_proxy is None:
480480
raise HTTPException(
481481
status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
@@ -513,7 +513,7 @@ async def create_chat_completion(
513513
llama_cpp.ChatCompletion, Iterator[llama_cpp.ChatCompletionChunk]
514514
] = await run_in_threadpool(llama.create_chat_completion, **kwargs)
515515
except Exception as err:
516-
exit_stack.close()
516+
await exit_stack.aclose()
517517
raise err
518518

519519
if isinstance(iterator_or_completion, Iterator):

0 commit comments

Comments (0)
pFad - Phonifier reborn

pFad - The Proxy pFad of © 2024 Garber Painting. All rights reserved.

Note: This service is not intended for secure transactions such as banking, social media, email, or purchasing. Use at your own risk. We assume no liability whatsoever for broken pages.


Alternative Proxies:

Alternative Proxy

pFad Proxy

pFad v3 Proxy

pFad v4 Proxy