# pyproject.toml — Podman AI Lab provider for Llama Stack
[build-system]
# NOTE(review): setuptools_scm is required here but [project].version is static
# below — confirm whether SCM-based versioning is intended; if not, the
# setuptools_scm requirement can be dropped.
requires = ["setuptools>=70.1.0", "setuptools_scm>=8", "wheel"]
build-backend = "setuptools.build_meta"

[project]
name = "podman-ai-lab-stack"
version = "0.12.0"
description = "Podman AI Lab provider for Llama Stack"
requires-python = ">=3.10"
# PEP 508 specifiers, one per line, sorted alphabetically.
dependencies = [
    "aiosqlite>=0.21.0",
    "autoevals>=0.0.130",
    "blobfile>=3.1.0",
    "chardet>=5.2.0",
    "datasets>=4.1.1",
    "faiss-cpu>=1.12.0",
    "fastapi>=0.118.0",
    "greenlet>=3.2.4",
    "llama-stack>=0.2.9",
    "mcp>=1.15.0",
    "ollama>=0.6.0",
    "opentelemetry-api>=1.37.0",
    "opentelemetry-exporter-otlp>=1.37.0",
    "opentelemetry-sdk>=1.37.0",
    "pydantic>=2.11.9",
    "pypdf>=6.1.1",
    "sentence-transformers>=5.1.1",
    "sqlalchemy>=2.0.43",
    "torch>=2.7.0",
]

# src-layout: packages live under src/.
[tool.setuptools]
package-dir = { "" = "src" }
include-package-data = true

# Ship the provider specs and the default run config inside the wheel.
[tool.setuptools.package-data]
"podman_ai_lab_stack" = ["providers.d/**/*", "run.yaml"]

# NOTE(review): removed the previous
#   [tool.setuptools.dynamic]
#   dependencies = { file = ["requirements.txt"] }
# table. `dependencies` is declared statically in [project] above; PEP 621
# forbids a field from being both static and dynamic, and setuptools >= 61
# fails the build on this conflict (the field was also missing from
# project.dynamic). The static list above is authoritative.

[tool.ruff]
extend-exclude = ["*.ipynb"]

# CPU-only PyTorch wheels: torch resolves exclusively from this index so that
# `uv` never pulls the (much larger) CUDA builds from PyPI.
[[tool.uv.index]]
name = "pytorch-cpu"
url = "https://download.pytorch.org/whl/cpu"
explicit = true

[tool.uv.sources]
torch = [
    { index = "pytorch-cpu" },
]