diff --git a/.devcontainer/cpu/devcontainer.json b/.devcontainer/cpu/devcontainer.json
index 583beea..0f1a219 100644
--- a/.devcontainer/cpu/devcontainer.json
+++ b/.devcontainer/cpu/devcontainer.json
@@ -9,7 +9,7 @@
   "remoteUser": "root",
   "workspaceFolder": "${localWorkspaceFolder}",
   "workspaceMount": "source=${localWorkspaceFolder},target=${localWorkspaceFolder},type=bind",
-  "postCreateCommand": "uv pip install -e .[dev]",
+  "postCreateCommand": "uv pip install -e .[cpu,dev]",
   "remoteEnv": {
     "UV_SYSTEM_PYTHON": "true"
   },
diff --git a/Dockerfile b/Dockerfile
index f75d677..d43f1e4 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -7,7 +7,7 @@ RUN apt-get update && \
 WORKDIR /app
 COPY focoos ./focoos
 COPY pyproject.toml ./pyproject.toml
-RUN uv pip install --system -e .
+RUN uv pip install --system -e .[cpu]
 
 FROM ghcr.io/focoosai/deeplearning:base-cu12-cudnn9-py312-uv AS focoos-cuda
 
diff --git a/Makefile b/Makefile
index e38af99..435da32 100644
--- a/Makefile
+++ b/Makefile
@@ -10,7 +10,7 @@ venv:
 	@uv venv --python=python3.12
 
 install: .uv .pre-commit
-	@uv pip install -e ".[dev]" --no-cache-dir
+	@uv pip install -e ".[cuda,dev]" --no-cache-dir
 	@pre-commit install
 
 install-gpu: .uv .pre-commit
diff --git a/README.md b/README.md
index 8f02e76..b7f8004 100644
--- a/README.md
+++ b/README.md
@@ -50,7 +50,7 @@ Foocoos is shipped with the following extras dependencies:
 ## CPU only or Remote Usage
 
 ```bash
-uv pip install focoos git+https://github.com/FocoosAI/focoos.git
+uv pip install focoos[cpu] git+https://github.com/FocoosAI/focoos.git
 ```
 
 ## GPU Runtimes
@@ -60,11 +60,7 @@ uv pip install focoos[torch] git+https://github.com/FocoosAI/focoos.git
 ```
 
 ### OnnxRuntime CUDA
-ensure that you have CUDA 12 and cuDNN 9 installed, as they are required for onnxruntime version 1.20.1.
-
-```bash
-apt-get -y install cudnn9-cuda-12
-```
+
 ```bash
 uv pip install focoos[cuda] git+https://github.com/FocoosAI/focoos.git
 ```
diff --git a/docs/getting_started/setup.md b/docs/getting_started/setup.md
index 6128b5e..6658ca8 100644
--- a/docs/getting_started/setup.md
+++ b/docs/getting_started/setup.md
@@ -4,8 +4,9 @@
 
 Focoos models support multiple inference runtimes. To keep the library lightweight and to allow users to use their environment, optional dependencies (e.g., torch, onnxruntime, tensorrt) are not installed by default. Foocoos is shipped with the following extras dependencies:
 
-- `[torch]`: torchscript CUDA
+- `[cpu]`: onnxruntime CPU
 - `[cuda]`: onnxruntime CUDA
+- `[torch]`: torchscript CUDA
 - `[tensorrt]`: onnxruntime TensorRT
 
 !!! note
@@ -40,7 +41,7 @@
 If you plan to run the SDK on a CPU-only environment:
 
 ```bash linenums="0"
-pip install 'focoos @ git+https://github.com/FocoosAI/focoos.git'
+pip install 'focoos[cpu] @ git+https://github.com/FocoosAI/focoos.git'
 ```
 
 * NVIDIA GPU Environment (torchscript)
diff --git a/pyproject.toml b/pyproject.toml
index 2127577..6157154 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -45,8 +45,7 @@ dependencies = [
     "setuptools~=75.7.0",
     "matplotlib~=3.10.0",
     "colorama~=0.4.6",
-    "ipython",
-    "onnxruntime==1.20.1"
+    "ipython"
 ]
 
 authors = [{ name = "focoos.ai", email = "info@focoos.ai" }]
@@ -58,6 +57,7 @@ keywords = [
 ]
 
 [project.optional-dependencies]
+cpu = ["onnxruntime==1.20.1"]
 cuda = ["onnxruntime-gpu==1.20.1"]
 tensorrt = ["onnxruntime-gpu==1.20.1","tensorrt==10.5.0"]
 torch = ["torch==2.3.0","torchvision"]
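As a quick way to verify which of these extras actually ended up in a given environment (for example inside the CPU devcontainer or the Docker images built above), here is a minimal sketch, not part of this diff, that reports the runtimes available at import time. It only assumes the `onnxruntime` / `onnxruntime-gpu` and `torch` packages pulled in by the `[cpu]`, `[cuda]`, `[tensorrt]`, and `[torch]` extras; the script name, function name, and messages are illustrative.

```python
# check_runtimes.py -- illustrative helper, not shipped with focoos.
# Reports which optional inference runtimes are importable in this environment.
import importlib.util


def report_runtimes() -> None:
    if importlib.util.find_spec("onnxruntime") is not None:
        import onnxruntime as ort

        # [cpu] typically yields ['CPUExecutionProvider'];
        # [cuda] adds 'CUDAExecutionProvider', [tensorrt] adds 'TensorrtExecutionProvider'.
        print("onnxruntime providers:", ort.get_available_providers())
    else:
        print("onnxruntime not installed (no [cpu]/[cuda]/[tensorrt] extra)")

    if importlib.util.find_spec("torch") is not None:
        import torch

        # [torch] extra: torchscript runtime; CUDA only if a compatible driver is present.
        print("torch CUDA available:", torch.cuda.is_available())
    else:
        print("torch not installed (no [torch] extra)")


if __name__ == "__main__":
    report_runtimes()
```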