mirror of https://git.isriupjv.fr/ISRI/ai-server
fixed an issue where the chat interface would only show the final token of a model answer
parent 0034c7b31a
commit 156db5d6a1
1 changed file with 4 additions and 2 deletions
@@ -20,7 +20,7 @@ class ChatInterface(base.BaseInterface):
     async def send_message(self, user_message, old_messages: list[dict], system_message: str):
         # normalize the user message (the type can be wrong, especially when "edited")
         if isinstance(user_message, str):
-            user_message: dict = {"files": [], "text": user_message}
+            user_message: dict = {"text": user_message}
 
         # copy the history to avoid modifying it
         messages: list[dict] = old_messages.copy()
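
For context: with a Gradio-style multimodal textbox the user turn usually arrives as a dict with "text" and "files" keys, while an edited or retried turn can come back as a bare string, which is what the normalization above guards against. A minimal sketch of that guard, assuming the same message shape as the diff (normalize_user_message is a hypothetical helper, not code from this commit):

def normalize_user_message(user_message) -> dict:
    # an edited/retried turn may arrive as a plain string instead of a dict
    if isinstance(user_message, str):
        return {"text": user_message}
    # already in the expected {"text": ..., "files": [...]} shape
    return user_message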
@@ -41,8 +41,10 @@ class ChatInterface(base.BaseInterface):
         })
 
         # infer the message through the model
+        assistant_message = ""
         async for chunk in self.model.infer(messages=messages):
-            yield chunk.decode("utf-8")
+            assistant_message += " " + chunk.decode("utf-8")
+            yield assistant_message
 
     def get_application(self):
         # create a gradio interface
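
Why the accumulation matters: a Gradio chat widget replaces the displayed assistant bubble with each value the streaming generator yields, so yielding only the latest decoded chunk leaves just the final token on screen. The new loop keeps a running assistant_message and yields that instead. A standalone sketch of the same pattern, assuming a gr.ChatInterface frontend and a made-up token stream standing in for self.model.infer:

import asyncio

import gradio as gr


async def fake_token_stream():
    # stand-in for the model's infer() stream: yields raw UTF-8 encoded chunks
    for token in (b"Hello", b"from", b"the", b"model"):
        await asyncio.sleep(0.1)
        yield token


async def respond(message, history):
    assistant_message = ""
    async for chunk in fake_token_stream():
        # each yield replaces the whole displayed answer, so yield the
        # accumulated text rather than the individual chunk
        assistant_message += " " + chunk.decode("utf-8")
        yield assistant_message.strip()


demo = gr.ChatInterface(respond)

if __name__ == "__main__":
    demo.launch()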