Mirror of https://github.com/comfyanonymous/ComfyUI.git (synced 2025-04-20 03:13:30 +00:00)
Merge 9608d029c5 into 98bdca4cb2
Commit cd181cc06b
Dockerfile (new file, 97 lines)
@@ -0,0 +1,97 @@
# --- Base Image Selection ---
ARG BASE_IMAGE_TAG="nvidia/cuda:12.8.1-cudnn-runtime-ubuntu24.04"
ARG TORCH_PRE_FLAG=""
ARG TORCH_INDEX_URL=""
ARG TORCH_EXTRA_INDEX_URL=""

# Use the ARG for the base image build stage
FROM ${BASE_IMAGE_TAG} AS base
# --- First stage ends here ---

# --- Start the final stage from the base ---
FROM base

# Environment variables (Keep as is)
ENV DEBIAN_FRONTEND=noninteractive
ENV UV_INSTALL_DIR="/root/.local/bin"
ENV UV_EXE="/root/.local/bin/uv"
ENV PYTHON_VERSION="3.12"
ENV VENV_PATH="/app/venv"
ENV VENV_PYTHON="${VENV_PATH}/bin/python"
ENV PATH="${VENV_PATH}/bin:${UV_INSTALL_DIR}:${PATH}"

# --- Layer 1: Install OS Dependencies & Python --- (Keep as is)
RUN apt-get update \
    && apt-get install -y --no-install-recommends \
        git curl "python${PYTHON_VERSION}" "python${PYTHON_VERSION}-dev" "python${PYTHON_VERSION}-venv" \
        wget ffmpeg libsm6 libxext6 libgl1 grep \
    && apt-get clean && rm -rf /var/lib/apt/lists/*

# --- Layer 2: Install and Verify UV --- (Keep as is)
RUN echo "Installing uv..." \
    && curl -LsSf https://astral.sh/uv/install.sh | sh \
    && echo "Verifying uv installation..." \
    && ${UV_EXE} --version

WORKDIR /app

# --- Layer 3: Create Virtual Environment & Ensure Core Tools ---
RUN echo "Creating virtual environment with uv..." \
    && ${UV_EXE} venv ${VENV_PATH} --python "python${PYTHON_VERSION}" \
    && echo "Ensuring pip and wheel are installed/updated in venv..." \
    # Explicitly install/upgrade pip and wheel using uv right after venv creation
    && ${UV_EXE} pip install -p ${VENV_PYTHON} --upgrade pip wheel \
    && echo "Verifying pip exists in venv:" \
    && ${VENV_PYTHON} -m pip --version

# --- RE-DECLARE ARGs HERE ---
ARG TORCH_PRE_FLAG
ARG TORCH_INDEX_URL
ARG TORCH_EXTRA_INDEX_URL

# --- Layer 4: PyTorch Installation ---
RUN echo "--- Executing PyTorch Install Step ---" \
    && echo " ARG TORCH_PRE_FLAG='${TORCH_PRE_FLAG}'" \
    && echo " ARG TORCH_INDEX_URL='${TORCH_INDEX_URL}'" \
    && echo " ARG TORCH_EXTRA_INDEX_URL='${TORCH_EXTRA_INDEX_URL}'" \
    && echo " Now running uv pip install..." \
    && ${UV_EXE} pip install \
        --upgrade \
        -p ${VENV_PYTHON} \
        # --- REMOVED INNER QUOTES from expansions ---
        ${TORCH_PRE_FLAG:+$TORCH_PRE_FLAG} \
        ${TORCH_INDEX_URL:+$TORCH_INDEX_URL} \
        ${TORCH_EXTRA_INDEX_URL:+$TORCH_EXTRA_INDEX_URL} \
        # --- END REMOVED INNER QUOTES ---
        torch torchvision torchaudio

# --- Layer 5: ComfyUI Setup (Clone & Requirements) ---
RUN echo "Cloning ComfyUI..." \
    && git clone https://github.com/RedsAnalysis/ComfyUI.git /app/comfyui \
    && echo "Filtering requirements.txt to remove potential torch conflicts..." \
    && grep -vE '^torch(vision|audio)?(=|<|>)?' /app/comfyui/requirements.txt > /app/comfyui/requirements.filtered.txt \
    && REQS_FILE="/app/comfyui/requirements.filtered.txt" \
    && echo "Installing ComfyUI base requirements from ${REQS_FILE}..." \
    # Explicitly add torchsde here along with requirements file
    && ${UV_EXE} pip install \
        -p ${VENV_PYTHON} \
        pyyaml \
        torchsde \
        -r ${REQS_FILE}

# --- Layer 6: ComfyUI-Manager Setup --- (Keep as is)
RUN echo "Cloning ComfyUI-Manager..." \
    && git clone https://github.com/ltdrdata/ComfyUI-Manager.git /app/comfyui/custom_nodes/ComfyUI-Manager \
    && MANAGER_REQS="/app/comfyui/custom_nodes/ComfyUI-Manager/requirements.txt" \
    && if [ -f "${MANAGER_REQS}" ]; then \
        echo "Installing ComfyUI-Manager requirements..."; \
        ${UV_EXE} pip install -p ${VENV_PYTHON} -r ${MANAGER_REQS}; \
    else \
        echo "ComfyUI-Manager requirements.txt not found."; \
    fi

# --- Final Setup --- (Keep as is)
EXPOSE 8188
HEALTHCHECK --interval=15s --timeout=5s --start-period=30s --retries=3 \
    CMD curl --fail http://localhost:8188/ || exit 1
CMD ["python", "/app/comfyui/main.py", "--listen", "0.0.0.0", "--port", "8188"]
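The four ARGs above are normally supplied by docker-build.sh (added below). As a sketch, a manual build for the NVIDIA/Stable combination would pass the same values the script uses; TORCH_PRE_FLAG and TORCH_INDEX_URL can be omitted because they default to empty, and the image tag here simply follows the script's <gpu>-<version> naming:

docker build \
    --build-arg BASE_IMAGE_TAG="nvidia/cuda:12.6.3-cudnn-runtime-ubuntu24.04" \
    --build-arg TORCH_EXTRA_INDEX_URL="--extra-index-url https://download.pytorch.org/whl/cu126" \
    -t comfyui-red-image:nvidia-stable \
    -f Dockerfile .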
README.md (22 lines changed)
@@ -271,6 +271,28 @@ For models compatible with Cambricon Extension for PyTorch (torch_mlu). Here's a
2. Next, install the PyTorch(torch_mlu) following the instructions on the [Installation](https://www.cambricon.com/docs/sdk_1.15.0/cambricon_pytorch_1.17.0/user_guide_1.9/index.html)
3. Launch ComfyUI by running `python main.py`

# DOCKER

For NVIDIA GPUs, and for AMD GPUs supported by ROCm.

After cloning the repo, run the following commands once in a bash terminal to build the Docker image:

```chmod +x docker-build.sh```

```./docker-build.sh```

The script will prompt you to select your GPU and the PyTorch version you want to install. Note: choose Latest for RTX 5000 series cards.

Enter ```1``` or ```2``` to make your selection.

After your Docker image is built, you can launch ComfyUI by running:

```docker-compose up```

To stop the container, run:

```docker-compose down```
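If you want to smoke-test the image without Compose, a minimal sketch of a direct run (port 8188 comes from the Dockerfile's EXPOSE; the image tag and the `--gpus all` flag are assumptions based on docker-build.sh and docker-compose.yml):

```docker run --rm -p 8188:8188 --gpus all comfyui-red-image:nvidia-latest```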
# Running

```python main.py```
docker-build.sh (new file, 157 lines)
@@ -0,0 +1,157 @@
#!/bin/bash

set -e # Exit immediately if a command exits with a non-zero status.

# Define Python Version and Venv Path (must match Dockerfile ENV)
PYTHON_VERSION="3.12"
VENV_PATH="/app/venv"
VENV_PYTHON="${VENV_PATH}/bin/python" # Used for constructing PyTorch install args

# Function to display version information
display_version_info() {
    echo "==========================================================="
    echo " PyTorch Version Selection:"
    echo "-----------------------------------------------------------"
    echo " Stable Version:"
    echo " - Thoroughly tested, recommended for general use."
    echo " - Pros: Reliability, fewer bugs."
    echo " - Cons: May lack the absolute latest features."
    echo "-----------------------------------------------------------"
    echo " Latest Version (Nightly/Pre-release):"
    echo " - Includes newest features and optimizations."
    echo " - Pros: Cutting-edge capabilities."
    echo " - Cons: Potentially less stable, may have bugs."
    echo "==========================================================="
}

# Function to ask user for GPU type
ask_gpu_type() {
    echo "Select GPU Type:"
    select gpu_choice in "NVIDIA" "AMD" "Cancel"; do
        case $gpu_choice in
            NVIDIA)
                gpu="NVIDIA"
                echo "Selected: NVIDIA"
                break
                ;;
            AMD)
                gpu="AMD"
                echo "Selected: AMD"
                break
                ;;
            Cancel)
                echo "Build cancelled."
                exit 0
                ;;
            *)
                echo "Invalid option $REPLY. Please choose 1, 2, or 3."
                ;;
        esac
    done
}

# Function to ask user for version preference
ask_version() {
    echo "Select PyTorch Version:"
    select version_choice in "Stable" "Latest" "Cancel"; do
        case $version_choice in
            Stable)
                version="Stable"
                echo "Selected: Stable"
                break
                ;;
            Latest)
                version="Latest"
                echo "Selected: Latest"
                break
                ;;
            Cancel)
                echo "Build cancelled."
                exit 0
                ;;
            *)
                echo "Invalid option $REPLY. Please choose 1, 2, or 3."
                ;;
        esac
    done
}

# --- Main Script Logic ---
display_version_info
ask_gpu_type
ask_version

# --- Determine Build Arguments based on Input ---
echo "Configuring build arguments..."

# --- Initialize new ARGs ---
TORCH_PRE_FLAG=""
TORCH_INDEX_URL=""
TORCH_EXTRA_INDEX_URL="" # Initialize as empty

if [[ "$gpu" == "NVIDIA" ]]; then
    if [[ "$version" == "Stable" ]]; then
        BASE_IMAGE_TAG="nvidia/cuda:12.6.3-cudnn-runtime-ubuntu24.04"
        TORCH_EXTRA_INDEX_URL="--extra-index-url https://download.pytorch.org/whl/cu126"
    else # Latest
        BASE_IMAGE_TAG="nvidia/cuda:12.8.1-cudnn-runtime-ubuntu24.04"
        TORCH_PRE_FLAG="--pre"
        TORCH_INDEX_URL="--index-url https://download.pytorch.org/whl/nightly/cu128"
    fi
elif [[ "$gpu" == "AMD" ]]; then
    if [[ "$version" == "Stable" ]]; then
        BASE_IMAGE_TAG="rocm/dev-ubuntu-24.04:6.2.4-complete"
        TORCH_INDEX_URL="--index-url https://download.pytorch.org/whl/rocm6.2"
    else # Latest
        BASE_IMAGE_TAG="rocm/dev-ubuntu-24.04:6.3.4-complete"
        TORCH_PRE_FLAG="--pre"
        TORCH_INDEX_URL="--index-url https://download.pytorch.org/whl/nightly/rocm6.3"
    fi
else
    echo "Error: Invalid GPU type configured after selection."
    exit 1
fi

# --- Construct and Run the Docker Build Command ---
IMAGE_NAME="comfyui-red-image:${gpu,,}-${version,,}"

echo "-----------------------------------------------------------"
echo "Starting Docker build..."
echo " Image Tag: ${IMAGE_NAME}"
echo " Base Image: ${BASE_IMAGE_TAG}"
echo " PyTorch Pre Flag: '${TORCH_PRE_FLAG}'"
echo " PyTorch Index URL: '${TORCH_INDEX_URL}'"
echo " PyTorch Extra Index URL: '${TORCH_EXTRA_INDEX_URL}'"
echo "-----------------------------------------------------------"

# Build the image using the SEPARATE Docker build arguments.
# Capture the exit status explicitly: under 'set -e' a plain failing command would
# abort the script before the status report below, so '|| BUILD_STATUS=$?' is used.
BUILD_STATUS=0
docker build \
    --no-cache \
    --build-arg BASE_IMAGE_TAG="${BASE_IMAGE_TAG}" \
    --build-arg TORCH_PRE_FLAG="${TORCH_PRE_FLAG}" \
    --build-arg TORCH_INDEX_URL="${TORCH_INDEX_URL}" \
    --build-arg TORCH_EXTRA_INDEX_URL="${TORCH_EXTRA_INDEX_URL}" \
    -t "${IMAGE_NAME}" \
    -f Dockerfile . || BUILD_STATUS=$?

# --- Report Build Status ---
echo "-----------------------------------------------------------"
if [ $BUILD_STATUS -eq 0 ]; then
    echo "Docker build successful!"
    echo "Image created: ${IMAGE_NAME}"
    echo ""
    echo "To run the container using Docker Compose (assuming docker-compose.yml is configured):"
    echo " docker-compose up -d"
    echo ""
    echo "To stop the container:"
    echo " docker-compose down"
else
    echo "Docker build failed with status: ${BUILD_STATUS}"
fi
echo "==========================================================="

exit $BUILD_STATUS
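As a concrete example of how these arguments flow into the image: selecting NVIDIA/Stable makes Layer 4 of the Dockerfile effectively run the command below (a sketch with the build arguments substituted; TORCH_PRE_FLAG and TORCH_INDEX_URL remain empty for this selection):

uv pip install --upgrade -p /app/venv/bin/python --extra-index-url https://download.pytorch.org/whl/cu126 torch torchvision torchaudio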
docker-compose.yml (new file, 36 lines)
@@ -0,0 +1,36 @@
services:
  comfyui_backend:
    image: comfyui-red-image:nvidia-latest
    container_name: comfyui-red-container
    ports:
      - "8188:8188"
    volumes:
      # Host Mounts for data access
      - ./input:/app/comfyui/input
      - ./output:/app/comfyui/output
      - ./models:/app/comfyui/models
      # Named Volumes for persistent state
      - comfyui_venv_data:/app/venv
      - comfyui_custom_nodes:/app/comfyui/custom_nodes
      - comfyui_user_config:/app/comfyui/user
    environment:
      - PATH=/app/venv/bin:/root/.local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              count: all
              capabilities: [gpu]
    restart: unless-stopped
    # Fallback GPU config (uncomment if 'deploy' fails)
    # runtime: nvidia
    # environment:
    #   - NVIDIA_VISIBLE_DEVICES=all
    #   - NVIDIA_DRIVER_CAPABILITIES=all

# Named Volume Definitions
volumes:
  comfyui_venv_data: {}
  comfyui_custom_nodes: {}
  comfyui_user_config: {}
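Typical usage once the image is built, using the service name defined above (the logs command is standard Compose and shown here only as a convenience; it is not mentioned elsewhere in this change):

docker-compose up -d
docker-compose logs -f comfyui_backend
docker-compose down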
requirements.txt
@@ -1,8 +1,5 @@
 comfyui-frontend-package==1.14.6
-torch
 torchsde
-torchvision
-torchaudio
 numpy>=1.25.0
 einops
 transformers>=4.28.1