diff --git a/pyproject.toml b/pyproject.toml index 735f5ac..fa53918 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "hatchling.build" [project] name = "flask_ml" -version = "0.2.4" +version = "0.2.5" authors = [ { name="Prasanna Lakkur Subramanyam", email="psubramanyam@umass.edu" }, { name="Atharva Kale", email="aukale@umass.edu" }, diff --git a/pyrightconfig.json b/pyrightconfig.json index 832f69e..3ba85a6 100644 --- a/pyrightconfig.json +++ b/pyrightconfig.json @@ -1,6 +1,10 @@ { "python.analysis.typeCheckingMode": "basic", "exclude": [ - "src/flask_ml/flask_ml_server/_generated_models.py" + "src/flask_ml/flask_ml_server/_generated_models.py", + "env", + ".env", + "venv", + ".venv", ] } diff --git a/simple_server.py b/simple_server.py index c72744a..26e9d7c 100644 --- a/simple_server.py +++ b/simple_server.py @@ -1,61 +1,67 @@ from typing import TypedDict -from flask_ml.flask_ml_server import MLServer -from flask_ml.flask_ml_server.models import BatchTextInput, BatchTextResponse, EnumParameterDescriptor, EnumVal, InputSchema, InputType, ParameterSchema, ResponseBody, TaskSchema, TextResponse +from flask_ml.flask_ml_server import MLServer, load_file_as_string +from flask_ml.flask_ml_server.models import ( + BatchTextInput, + BatchTextResponse, + EnumParameterDescriptor, + EnumVal, + InputSchema, + InputType, + ParameterSchema, + ResponseBody, + TaskSchema, + TextResponse, +) server = MLServer(__name__) +server.add_app_metadata( + name="Simple Server - Transform Case", + author="Flask-ML Team", + version="0.1.0", + info=load_file_as_string("simple_server_info.md"), +) + class TransformCaseInputs(TypedDict): text_inputs: BatchTextInput + class TransformCaseParameters(TypedDict): - to_case: str # 'upper' or 'lower' + to_case: str # 'upper' or 'lower' + def create_transform_case_task_schema() -> TaskSchema: - input_schema = InputSchema( - key="text_inputs", - label="Text to Transform", - input_type=InputType.BATCHTEXT - ) + 
input_schema = InputSchema(key="text_inputs", label="Text to Transform", input_type=InputType.BATCHTEXT) parameter_schema = ParameterSchema( key="to_case", label="Case to Transform Text Into", subtitle="'upper' will convert all text to upper case. 'lower' will convert all text to lower case.", value=EnumParameterDescriptor( - enum_vals=[ - EnumVal( - key="upper", - label="UPPER" - ), - EnumVal( - key="lower", - label="LOWER" - ) - ], - default="upper" - ) - ) - return TaskSchema( - inputs = [input_schema], - parameters = [parameter_schema] + enum_vals=[EnumVal(key="upper", label="UPPER"), EnumVal(key="lower", label="LOWER")], + default="upper", + ), ) + return TaskSchema(inputs=[input_schema], parameters=[parameter_schema]) + @server.route( "/transform_case", task_schema_func=create_transform_case_task_schema, short_title="Transform Case", - order=0 + order=0, ) def transform_case(inputs: TransformCaseInputs, parameters: TransformCaseParameters) -> ResponseBody: - to_upper: bool = parameters['to_case'] == 'upper' - + to_upper: bool = parameters["to_case"] == "upper" + outputs = [] - for text_input in inputs['text_inputs'].texts: + for text_input in inputs["text_inputs"].texts: raw_text = text_input.text processed_text = raw_text.upper() if to_upper else raw_text.lower() outputs.append(TextResponse(value=processed_text, title=raw_text)) return ResponseBody(root=BatchTextResponse(texts=outputs)) -if __name__ == '__main__': + +if __name__ == "__main__": # Run a debug server server.run() diff --git a/simple_server_info.md b/simple_server_info.md new file mode 100644 index 0000000..798b1de --- /dev/null +++ b/simple_server_info.md @@ -0,0 +1,20 @@ +# Transform Case + +This application transforms text to upper or lower case. It is a simple example of a Flask-ML application. + +## Inputs + +- **Batch of Text**: A batch of text inputs to be transformed. + +## Parameters + +- **Case to Transform Text Into**: The case to transform the text into. 
The options are 'upper' or 'lower'. + +## Outputs + +- **Batch of Text**: The transformed text. + +## Constraints + +- The text must be a single line of text. +- Providing a collection of text files is not supported. diff --git a/src/flask_ml/flask_ml_server/MLServer.py b/src/flask_ml/flask_ml_server/MLServer.py index ddf375d..1843520 100644 --- a/src/flask_ml/flask_ml_server/MLServer.py +++ b/src/flask_ml/flask_ml_server/MLServer.py @@ -17,6 +17,7 @@ ResponseBody, SchemaAPIRoute, TaskSchema, + AppMetadata ) from flask_ml.flask_ml_server.utils import ( ensure_ml_func_hinting_and_task_schemas_are_valid, @@ -60,6 +61,7 @@ def __init__(self, name): """ self.app = Flask(name, static_folder=None) self.endpoints: List[EndpointDetailsNoSchema] = [] + self._app_metadata: Optional[AppMetadata] = None @self.app.route("/api/routes", methods=["GET"]) def list_routes(): @@ -86,6 +88,20 @@ def list_routes(): for endpoint in self.endpoints ] return jsonify(APIRoutes(root=routes).model_dump(mode="json")) + + @self.app.route("/api/app_metadata", methods=["GET"]) + def get_app_metadata(): + if self._app_metadata is None: + return jsonify({"error": "App metadata not set"}) + return jsonify(self._app_metadata.model_dump(mode="json")) + + def add_app_metadata(self, name: str, author: str, version: str, info: str): + self._app_metadata = AppMetadata( + name=name, + author=author, + version=version, + info=info + ) def route( self, diff --git a/src/flask_ml/flask_ml_server/__init__.py b/src/flask_ml/flask_ml_server/__init__.py index 3c17de5..c661c4e 100644 --- a/src/flask_ml/flask_ml_server/__init__.py +++ b/src/flask_ml/flask_ml_server/__init__.py @@ -1,3 +1,11 @@ +from pathlib import Path from .MLServer import MLServer __all__ = ["MLServer"] # for flake8 unused import error + +def load_file_as_string(file_path: str) -> str: + fp = Path(file_path) + if not fp.is_file(): + raise FileNotFoundError(f"File {file_path} not found") + with open(file_path, "r") as f: + return f.read() diff 
--git a/src/flask_ml/flask_ml_server/models.py b/src/flask_ml/flask_ml_server/models.py index f7df410..73cf96d 100644 --- a/src/flask_ml/flask_ml_server/models.py +++ b/src/flask_ml/flask_ml_server/models.py @@ -1,24 +1,23 @@ # generated by datamodel-codegen: # filename: openapi.yaml -# timestamp: 2024-10-24T01:31:17+00:00 +# timestamp: 2024-10-31T02:34:36+00:00 from __future__ import annotations -from datetime import datetime from enum import Enum from typing import Annotated, Any, Dict, List, Optional, Union from pydantic import BaseModel, ConfigDict, Field, RootModel -class InfoPage(BaseModel): +class AppMetadata(BaseModel): model_config = ConfigDict( populate_by_name=True, ) info: Annotated[str, Field(description='Markdown content to render on the info page')] author: str version: str - last_updated: Annotated[datetime, Field(alias='lastUpdated')] + name: Annotated[str, Field(examples=['Face Match App'])] class SchemaAPIRoute(BaseModel): diff --git a/src/flask_ml/flask_ml_server/openapi.yaml b/src/flask_ml/flask_ml_server/openapi.yaml index 459a739..91becaf 100644 --- a/src/flask_ml/flask_ml_server/openapi.yaml +++ b/src/flask_ml/flask_ml_server/openapi.yaml @@ -1,19 +1,20 @@ + openapi: 3.0.0 info: title: FlaskML version: 1.0.0 description: API for processing machine learning inputs and returning results. 
paths: - /info: + /api/app_metadata: get: - summary: Get Info Page + summary: Get App Metadata responses: '200': - description: Info page rendered in markdown + description: App metadata content: application/json: schema: - $ref: '#/components/schemas/InfoPage' + $ref: '#/components/schemas/AppMetadata' /api/routes: get: @@ -86,9 +87,9 @@ paths: components: schemas: # Info Page models - InfoPage: + AppMetadata: type: object - required: [info, author, version, lastUpdated] + required: [info, name, author, version] properties: info: type: string @@ -97,9 +98,9 @@ components: type: string version: type: string - lastUpdated: + name: type: string - format: date-time + example: "Face Match App" example: info: "# Welcome to the Face Match App\n\nThis app will help you to match faces in your images..." diff --git a/tests/conftest.py b/tests/conftest.py index 9439d9d..0f833ac 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -71,7 +71,7 @@ def process_directories(inputs: List[DirectoryInput], parameters): return results -@pytest.fixture(scope="session", autouse=True) +@pytest.fixture(autouse=True) def server(): server = MLServer(__name__) @@ -187,6 +187,6 @@ def server_process_text_input_with_text_area_schema( return server -@pytest.fixture(scope="session", autouse=True) +@pytest.fixture(autouse=True) def app(server: MLServer): return server.app.test_client() diff --git a/tests/test_app_metadata.py b/tests/test_app_metadata.py new file mode 100644 index 0000000..70324ae --- /dev/null +++ b/tests/test_app_metadata.py @@ -0,0 +1,43 @@ +from pathlib import Path +from flask_ml.flask_ml_server import MLServer, load_file_as_string +from flask.testing import FlaskClient +import pytest + +TEST_MARKDOWN_FILE_PATH = "test_markdown_file_path.md" +TEST_MARKDOWN_FILE_CONTENT = "# This is a test markdown file" + + +@pytest.fixture +def test_markdown_file_path(tmp_path: Path) -> str: + file_path = tmp_path / TEST_MARKDOWN_FILE_PATH + 
file_path.write_text(TEST_MARKDOWN_FILE_CONTENT) + return str(file_path) + + +def test_app_metadata_provided(server: MLServer, app: FlaskClient, test_markdown_file_path: str): + server.add_app_metadata( + info=load_file_as_string(test_markdown_file_path), + author="Test Author", + version="1.0.0", + name="Test App", + ) + + response = app.get("/api/app_metadata") + assert response.status_code == 200 + assert response.json == { + "info": TEST_MARKDOWN_FILE_CONTENT, + "author": "Test Author", + "version": "1.0.0", + "name": "Test App", + } + + +def test_app_metadata_not_provided(app: FlaskClient): + response = app.get("/api/app_metadata") + assert response.status_code == 200 + assert response.json == {"error": "App metadata not set"} + + +def test_invalid_file_path(): + with pytest.raises(FileNotFoundError): + load_file_as_string("invalid_file_path.md") diff --git a/website/materials/guides/getting-started.md b/website/materials/guides/getting-started.md index 93e8216..a7b5dac 100644 --- a/website/materials/guides/getting-started.md +++ b/website/materials/guides/getting-started.md @@ -436,4 +436,14 @@ if __name__ == "__main__": # Run a debug server server.run() -``` \ No newline at end of file +``` + +# Additional Features + +## Adding an automatically generated CLI + +Flask-ML can automatically generate a CLI for your machine learning code. See [Writing a CLI](./cli) for more information. + +## Adding Application Metadata + +You can provide metadata about your application for use by [Rescue-Box Desktop](https://github.com/UMass-Rescue/RescueBox-Desktop). See [Adding Application Metadata](./metadata) for more information. 
diff --git a/website/materials/guides/metadata.md b/website/materials/guides/metadata.md new file mode 100644 index 0000000..6d363a8 --- /dev/null +++ b/website/materials/guides/metadata.md @@ -0,0 +1,30 @@ +--- +sidebar_position: 4 +--- + +# Adding Application Metadata + +## Introduction + +Adding application metadata such as the name, description, and version of your application is a simple process. Rescue-Box Desktop uses this metadata to display information about your application in the application list. + +It is very simple to add metadata to your Flask-ML application. In your `server.py` file: + +```python +from flask_ml.flask_ml_server import MLServer, load_file_as_string + +server = MLServer(__name__) + +server.add_app_metadata( + name="Simple Server - Transform Case", + author="Flask-ML Team", + version="0.1.0", + info=load_file_as_string("simple_server_info.md"), +) +``` + +Here, info is a Markdown string that contains an overview of your application. See [Sample Application Info](https://github.com/UMass-Rescue/Flask-ML/blob/master/simple_server_info.md) for an example. + +This creates a new route at `/api/app_metadata` that returns the metadata as a JSON object. This will be used by [Rescue-Box Desktop](https://github.com/UMass-Rescue/RescueBox-Desktop) to display it here: + +![](/img/sample_markdown_app_metadata.png) diff --git a/website/static/img/sample_markdown_app_metadata.png b/website/static/img/sample_markdown_app_metadata.png new file mode 100644 index 0000000..a8acbcd Binary files /dev/null and b/website/static/img/sample_markdown_app_metadata.png differ