mirror of https://git.isriupjv.fr/ISRI/ai-server (synced 2025-04-24 18:18:11 +02:00)
added auto-detection of the output type on the client side, to allow downloading binary files such as videos
parent 775c78c6cb
commit 1a49aa3779

5 changed files with 39 additions and 1 deletion
@@ -3,6 +3,8 @@
     "tags": ["dummy"],
     "file": "model.py",
+    "output_type": "video/mp4",

     "inputs": {
         "file": {"type": "file"}
     }
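The hunk above adds an "output_type" field to the dummy model's config. As a minimal sketch of how such a field might be consumed (the config path and the fallback value are assumptions, not taken from the repository):

import json
from pathlib import Path

# hypothetical config location; the real layout of the model library may differ
config = json.loads(Path("models/dummy/config.json").read_text())

# assumed fallback: treat models without an explicit output type as returning JSON
output_type = config.get("output_type", "application/json")
print(output_type)  # "video/mp4" for the dummy model above
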
@@ -1,3 +1,4 @@
+import asyncio
 import json
 import os
 import typing
@@ -10,6 +11,11 @@ from source import model, api


 class ModelManager:
+    """
+    The model manager
+
+    Load the list of models available, ensure that only one model is loaded at the same time.
+    """
     def __init__(self, application: api.Application, model_library: os.PathLike | str):
         self.application: api.Application = application
         self.model_library: Path = Path(model_library)
@@ -20,7 +26,9 @@ class ModelManager:
         self.models: dict[str, model.base.BaseModel] = {}

         # the currently loaded model
-        # TODO(Faraphel): load more than one model at a time ? require a way more complex manager to handle memory issue
+        # TODO(Faraphel): load more than one model at a time ?
+        #  would require a way more complex manager to handle memory issue
+        #  having two calculations at the same time might not be worth it either
         self.current_loaded_model: typing.Optional[model.base.BaseModel] = None

         # lock to avoid concurrent inference and concurrent model loading and unloading
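The comments above mention a lock that prevents concurrent inference and concurrent model loading/unloading; together with the newly added "import asyncio", this points to an asyncio.Lock. A minimal sketch of that pattern (the class and method names below are illustrative, not the repository's actual implementation):

import asyncio
import typing


class SingleModelSketch:
    """Illustrative only: keep at most one model loaded and one inference running."""

    def __init__(self) -> None:
        self.current_loaded_model: typing.Optional[object] = None
        self._lock = asyncio.Lock()  # guards loading, unloading and inference

    async def infer(self, requested_model: object, payload: bytes) -> bytes:
        async with self._lock:
            # unload the previous model and load the requested one if needed
            if self.current_loaded_model is not requested_model:
                self.current_loaded_model = requested_model
            # placeholder for the real inference call
            return payload
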
@@ -77,6 +77,12 @@ class PythonModel(base.BaseModel):
             infer_api,
             methods=["POST"],
             tags=self.tags,
+            # summary=...,
+            # description=...,
+            response_class=fastapi.responses.StreamingResponse,
+            responses={
+                200: {"content": {self.output_type: {}}}
+            },
         )

     def _load(self) -> None:
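The responses declaration above is what exposes the model's output type to clients: FastAPI publishes the content type in the OpenAPI schema, and the streaming response carries it as the Content-Type header. A self-contained sketch under assumed names (the route path, the app object and the placeholder bytes are not from the project):

import fastapi

app = fastapi.FastAPI()


@app.post(
    "/models/dummy/infer",  # hypothetical route path
    response_class=fastapi.responses.StreamingResponse,
    responses={200: {"content": {"video/mp4": {}}}},  # advertised in the OpenAPI schema
)
async def infer() -> fastapi.responses.StreamingResponse:
    payload = b"\x00\x00\x00\x18ftypmp42"  # placeholder bytes, not a real video
    return fastapi.responses.StreamingResponse(iter([payload]), media_type="video/mp4")
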
@@ -1 +1,2 @@
 from . import parameters
+from . import mimetypes
source/utils/mimetypes.py (new file, 21 lines)

@@ -0,0 +1,21 @@
+def is_textlike(mimetype: str) -> bool:
+    """
+    Determinate if a mimetype is considered as holding text
+    :param mimetype: the mimetype to check
+    :return: True if the mimetype represent text, False otherwise
+    """
+
+    # check the family of the mimetype
+    if mimetype.startswith("text/"):
+        return True
+
+    # check applications formats that are text formatted
+    if mimetype in [
+        "application/xml",
+        "application/json",
+        "application/javascript"
+    ]:
+        return True
+
+    # otherwise consider the file as non-text
+    return False
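On the client side, is_textlike is presumably what drives the auto-detection from the commit message: decode the response as text when its Content-Type is text-like, otherwise write the raw bytes to a file. A usage sketch (the endpoint URL, the use of the requests library and the output file name are assumptions):

import requests

from source.utils.mimetypes import is_textlike

response = requests.post(
    "http://localhost:8000/models/dummy/infer",  # placeholder endpoint
    files={"file": ("input.bin", b"...")},
)
mimetype = response.headers.get("Content-Type", "application/octet-stream").split(";")[0]

if is_textlike(mimetype):
    print(response.text)  # text-like output: decode and display it
else:
    with open("output.mp4", "wb") as file:  # binary output such as video/mp4: save it
        file.write(response.content)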