# =============================================================================
# Trellis 3D Docker Configuration
# =============================================================================
# Copy this file to .env and modify as needed
# Usage: cp docker.env.example .env
#
# These variables are used by docker-compose.yml and can also be used
# when building directly with docker build
# =============================================================================
# -----------------------------------------------------------------------------
# CUDA and System Configuration
# -----------------------------------------------------------------------------
# CUDA version (must match available nvidia/cuda images)
# See: https://hub.docker.com/r/nvidia/cuda/tags
CUDA_VERSION=12.3.2
# cuDNN version (9 for recent CUDA 12.x images such as 12.3.2 used here; 8 for older CUDA)
# Check that a matching nvidia/cuda tag exists, e.g. ${CUDA_VERSION}-cudnn${CUDNN_VERSION}-...
CUDNN_VERSION=9
# Ubuntu version
UBUNTU_VERSION=22.04
# Python version (use major.minor format, e.g., 3.10)
PYTHON_VERSION=3.10
# -----------------------------------------------------------------------------
# Python Package Versions
# -----------------------------------------------------------------------------
# Poetry version for dependency management
POETRY_VERSION=1.8.3
# PyTorch version (should match your requirements)
TORCH_VERSION=2.4.0
# Kaolin version and source URL
# NOTE: the index below is a torch-2.4.0/cu121 build; cu121 wheels generally run on
# CUDA 12.x runtimes via driver compatibility — confirm this matches your CUDA_VERSION
KAOLIN_VERSION=0.17.0
KAOLIN_INDEX_URL=https://nvidia-kaolin.s3.us-east-2.amazonaws.com/torch-2.4.0_cu121.html
# CUDA architectures to compile for (space-separated compute capabilities)
# 7.0,7.5 = Volta/Turing (V100, RTX 2080), 8.0,8.6 = Ampere (A100, RTX 3090)
# 8.9 = Ada Lovelace (RTX 4090), 9.0 = Hopper (H100)
TORCH_CUDA_ARCH_LIST=7.0 7.5 8.0 8.6 8.9 9.0
# -----------------------------------------------------------------------------
# Package Installation Options
# -----------------------------------------------------------------------------
# Force building packages from source instead of using wheels
# Useful for different CUDA versions or custom builds
FORCE_BUILD_FLASH_ATTN=false
# -----------------------------------------------------------------------------
# Application Configuration
# -----------------------------------------------------------------------------
# Application user (non-root user inside container)
APP_USER=appuser
# User ID for the app user (match your host user ID for volume permissions)
# Run `id -u` on Linux to get your user ID
APP_UID=1000
# Port inside the container (default Streamlit port)
APP_PORT=8501
# Port on the host machine (what you access in your browser)
HOST_PORT=8501
# -----------------------------------------------------------------------------
# Streamlit Configuration
# -----------------------------------------------------------------------------
# Streamlit server address (0.0.0.0 to accept all connections)
STREAMLIT_SERVER_ADDRESS=0.0.0.0
# Run in headless mode (no browser auto-open)
STREAMLIT_SERVER_HEADLESS=true
# -----------------------------------------------------------------------------
# GPU Configuration
# -----------------------------------------------------------------------------
# Which GPUs to use (all, or comma-separated device IDs like "0,1")
CUDA_VISIBLE_DEVICES=all
# Number of GPUs to allocate (all, or a specific number)
GPU_COUNT=all
# -----------------------------------------------------------------------------
# Cache Directory Configuration
# -----------------------------------------------------------------------------
# Cache directories inside the container
CACHE_DIR=/home/appuser/.cache
HF_CACHE_DIR=/home/appuser/.cache/huggingface
REMBG_CACHE_DIR=/home/appuser/.u2net
TRELLIS_OUTPUT_DIR=/tmp/Trellis-demo
# Docker volume names for persistent storage
CACHE_VOLUME=trellis-cache
HF_CACHE_VOLUME=huggingface-cache
REMBG_CACHE_VOLUME=rembg-cache
# Host directory for outputs (relative or absolute path)
OUTPUTS_HOST_DIR=./outputs
# Host cache directories (bind mounts for better compatibility)
# NOTE(review): `~` expansion depends on how these values are consumed by
# docker-compose; if bind mounts fail, use absolute paths instead
HOST_CACHE_DIR=~/.cache/trellis
HOST_HF_CACHE_DIR=~/.cache/huggingface
HOST_REMBG_CACHE_DIR=~/.cache/rembg
# -----------------------------------------------------------------------------
# Docker Image Configuration
# -----------------------------------------------------------------------------
# Docker image name
IMAGE_NAME=trellis-box
# Docker image tag
IMAGE_TAG=latest
# -----------------------------------------------------------------------------
# Example Configurations for Different Scenarios
# -----------------------------------------------------------------------------
# Example 1: Use a different CUDA version (check Docker Hub for available versions)
# CUDA_VERSION=12.6.0
# CUDNN_VERSION=9
# Example 2: Change the application port
# APP_PORT=8080
# HOST_PORT=8080
# Example 3: Custom Streamlit configuration
# STREAMLIT_SERVER_ADDRESS=127.0.0.1 # Only localhost (not recommended for Docker)
# STREAMLIT_SERVER_HEADLESS=false # Open browser automatically
# Example 4: Match host user ID (useful for file permissions)
# APP_UID=1001 # Replace with output of: id -u
# Example 5: Use only specific GPUs
# CUDA_VISIBLE_DEVICES=0,1
# GPU_COUNT=2
# Example 6: Use Python 3.11 (ensure compatibility with dependencies first)
# PYTHON_VERSION=3.11
# Example 7: Compile only for your specific GPU (faster build)
# For RTX 3090 only: TORCH_CUDA_ARCH_LIST=8.6
# For RTX 4090 only: TORCH_CUDA_ARCH_LIST=8.9
# For multiple: TORCH_CUDA_ARCH_LIST=8.6 8.9
# Example 8: Use custom cache directories
# CACHE_DIR=/data/.cache
# HF_CACHE_DIR=/data/.cache/huggingface
# REMBG_CACHE_DIR=/data/.u2net
# Example 9: Use custom volume names (useful for multiple instances)
# CACHE_VOLUME=trellis-dev-cache
# HF_CACHE_VOLUME=huggingface-dev-cache
# REMBG_CACHE_VOLUME=rembg-dev-cache
# Example 10: Store outputs in a different directory
# OUTPUTS_HOST_DIR=/path/to/my/outputs