From 1a49aa377992a40d869b1891412832854c58df03 Mon Sep 17 00:00:00 2001
From: faraphel
Date: Fri, 10 Jan 2025 19:12:21 +0100
Subject: [PATCH] added auto-detection for the output type on the client side
 to be able to download binary files such as videos

---
 samples/models/dummy/config.json |  2 ++
 source/manager/ModelManager.py   | 10 +++++++++-
 source/model/PythonModel.py      |  6 ++++++
 source/utils/__init__.py         |  1 +
 source/utils/mimetypes.py        | 21 +++++++++++++++++++++
 5 files changed, 39 insertions(+), 1 deletion(-)
 create mode 100644 source/utils/mimetypes.py

diff --git a/samples/models/dummy/config.json b/samples/models/dummy/config.json
index 09be48c..c12d549 100644
--- a/samples/models/dummy/config.json
+++ b/samples/models/dummy/config.json
@@ -3,6 +3,8 @@
   "tags": ["dummy"],
   "file": "model.py",
 
+  "output_type": "video/mp4",
+
   "inputs": {
     "file": {"type": "file"}
   }
diff --git a/source/manager/ModelManager.py b/source/manager/ModelManager.py
index f6e7268..6313ab9 100644
--- a/source/manager/ModelManager.py
+++ b/source/manager/ModelManager.py
@@ -1,3 +1,4 @@
+import asyncio
 import json
 import os
 import typing
@@ -10,6 +11,11 @@ from source import model, api
 
 
 class ModelManager:
+    """
+    The model manager
+    Load the list of models available, ensure that only one model is loaded at the same time.
+    """
+
     def __init__(self, application: api.Application, model_library: os.PathLike | str):
         self.application: api.Application = application
         self.model_library: Path = Path(model_library)
@@ -20,7 +26,9 @@ class ModelManager:
         self.models: dict[str, model.base.BaseModel] = {}
 
         # the currently loaded model
-        # TODO(Faraphel): load more than one model at a time ? require a way more complex manager to handle memory issue
+        # TODO(Faraphel): load more than one model at a time ?
+        #  would require a way more complex manager to handle memory issue
+        #  having two calculations at the same time might not be worth it either
         self.current_loaded_model: typing.Optional[model.base.BaseModel] = None
 
         # lock to avoid concurrent inference and concurrent model loading and unloading
diff --git a/source/model/PythonModel.py b/source/model/PythonModel.py
index 2d91df1..c23bceb 100644
--- a/source/model/PythonModel.py
+++ b/source/model/PythonModel.py
@@ -77,6 +77,12 @@ class PythonModel(base.BaseModel):
             infer_api,
             methods=["POST"],
             tags=self.tags,
+            # summary=...,
+            # description=...,
+            response_class=fastapi.responses.StreamingResponse,
+            responses={
+                200: {"content": {self.output_type: {}}}
+            },
         )
 
     def _load(self) -> None:
diff --git a/source/utils/__init__.py b/source/utils/__init__.py
index f6bd50e..1b48283 100644
--- a/source/utils/__init__.py
+++ b/source/utils/__init__.py
@@ -1 +1,2 @@
 from . import parameters
+from . import mimetypes
diff --git a/source/utils/mimetypes.py b/source/utils/mimetypes.py
new file mode 100644
index 0000000..2b54dd8
--- /dev/null
+++ b/source/utils/mimetypes.py
@@ -0,0 +1,21 @@
+def is_textlike(mimetype: str) -> bool:
+    """
+    Determinate if a mimetype is considered as holding text
+    :param mimetype: the mimetype to check
+    :return: True if the mimetype represent text, False otherwise
+    """
+
+    # check the family of the mimetype
+    if mimetype.startswith("text/"):
+        return True
+
+    # check applications formats that are text formatted
+    if mimetype in [
+        "application/xml",
+        "application/json",
+        "application/javascript"
+    ]:
+        return True
+
+    # otherwise consider the file as non-text
+    return False
\ No newline at end of file
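
For reference, below is a minimal client-side sketch (not part of the patch) of how the declared output type could be consumed. Only source.utils.mimetypes.is_textlike comes from this change; the requests-based client, the endpoint URL layout and the download_inference helper are assumptions for illustration. The Content-Type header announced by the StreamingResponse route decides whether the body is decoded as text or written to disk as binary (e.g. an MP4 video).

# hypothetical usage sketch, not part of the commit above
import requests

from source.utils import mimetypes


def download_inference(url: str, files: dict, destination: str) -> str | None:
    """
    Call an inference endpoint and return its output as text,
    or save it to a file if it is binary (e.g. a video)
    :param url: the inference endpoint (assumed URL layout)
    :param files: the multipart files to send to the model
    :param destination: where to save a binary result
    :return: the text output, or None if the result was saved to a file
    """

    response = requests.post(url, files=files)
    response.raise_for_status()

    # the media type declared by the server through "output_type"
    content_type = response.headers.get("Content-Type", "application/octet-stream")

    if mimetypes.is_textlike(content_type):
        # text-like output: decode and return it directly
        return response.text

    # binary output: write the raw bytes to disk instead of decoding them
    with open(destination, "wb") as file:
        file.write(response.content)
    return None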