-
-
Notifications
You must be signed in to change notification settings - Fork 86
Expand file tree
/
Copy pathpyproject.toml
More file actions
129 lines (119 loc) · 4.08 KB
/
pyproject.toml
File metadata and controls
129 lines (119 loc) · 4.08 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
# PEP 517/518 build configuration: setuptools is the build backend.
# NOTE(review): setuptools-scm is required at build time, but the version is
# taken from `symai.SYMAI_VERSION` via [tool.setuptools.dynamic], not from SCM
# tags -- confirm setuptools-scm is actually still needed here.
[build-system]
requires = ["setuptools", "setuptools-scm"]
build-backend = "setuptools.build_meta"
# uv-specific resolver/installer settings.
[tool.uv]
package = true  # treat this project as an installable package, not a flat script dir
required-version = ">=0.9.17"  # minimum uv version that understands the options below
# Ignore distributions published within the last 7 days so freshly-uploaded
# (and potentially broken) releases are not pulled in automatically.
exclude-newer = "7 days"
exclude-newer-package = { torch = false } # pytorch index lacks upload timestamps
# Dedicated index serving CPU-only PyTorch wheels. `explicit = true` means this
# index is only consulted for packages explicitly pinned to it in
# [tool.uv.sources]; all other packages keep resolving from the default index.
[[tool.uv.index]]
name = "pytorch-cpu"
url = "https://download.pytorch.org/whl/cpu"
explicit = true
# Route torch (and only torch) to the CPU index declared above.
[tool.uv.sources]
torch = { index = "pytorch-cpu" }
# Core package metadata (PEP 621).
[project]
name = "symbolicai"
dynamic = ["version"]  # resolved at build time via [tool.setuptools.dynamic]
authors = [
    {name = "Marius-Constantin Dinu", email = "marius@extensity.ai"},
    {name = "Leoveanu-Condrei Claudiu", email = "leo@extensity.ai"},
]
description = "A Neurosymbolic Perspective on Large Language Models"
readme = "README.md"
requires-python = ">=3.11"
keywords = ["probabilistic programming", "machine learning"]
license = {file = "LICENSE"}
classifiers = [
    "Programming Language :: Python :: 3",
    "License :: OSI Approved :: BSD License",
    "Operating System :: OS Independent",
]
# Runtime dependencies: one PEP 508 requirement per line, sorted
# alphabetically (case-insensitive) per PyPA convention.
dependencies = [
    "aiohttp>=3.13.4",
    "anthropic>=0.87.0",
    "attrs>=23.2.0",
    "beartype>=0.18.2",
    "cerebras-cloud-sdk>=1.59.0",
    "dill>=0.4.0",
    "GitPython>=3.1.42",
    "google-genai>=1.16.1",
    "ipython>=8.24.0",
    "loguru>=0.7.3",
    "markitdown[docx,outlook,pdf,pptx,xls,xlsx]",
    "nest-asyncio>=1.6.0",
    "numpy>=1.26.4,<=2.1.3",
    "openai>=1.60.0",
    "opencv-python>=4.8.1.78",
    "prompt-toolkit>=3.0.43",
    "pydantic>=2.8.2",
    "pydantic-core>=2.20.1",
    "python-box>=7.1.1",
    "pyvis>=0.3.2",
    "requests-toolbelt>=1.0.0",
    "rich>=13.9.4",
    "scikit-learn>=1.7.0",
    "setuptools>=70.0.0",
    "sympy>=1.12",
    "tiktoken>=0.8.0",
    "toml>=0.10.2",
    "torch<2.10.0",  # weird errors, things like "no torch.Tensor"
    "tqdm>=4.66.3",
]
# Optional feature groups, installable as `symbolicai[<extra>]`.
[project.optional-dependencies]
bitsandbytes = ["bitsandbytes>=0.43.1"] # handle separately because of Apple Silicon
hf = ["transformers>=4.45.2", "accelerate>=0.33.0", "peft>=0.13.1", "datasets>=3.0.1", "trl>=0.11.3", "sentencepiece>=0.2.0", "sentence-transformers>=2.5.1"]
scrape = ["beautifulsoup4>=4.12.3", "trafilatura>=2.0.0", "pdfminer.six", "playwright>=1.45.0", "parallel-web>=0.3.3"]
llama_cpp = ["llama-cpp-python[server]>=0.3.7"] # handle separately since this dependency may not compile and require special maintenance
# NOTE: vllm is a bring-your-own dependency (same pattern as llama.cpp's --cpp-server-path).
# Users clone + build vLLM in its own venv and pass --vllm-python-path to symserver.
# See docs/source/ENGINES/local_engine.md.
wolframalpha = ["wolframalpha>=5.0.0"]
lean = ["docker>=7.0.0", "axiom-axle>=1.0.0"]
whisper = ["openai-whisper>=20240930", "numba>=0.62.1", "llvmlite>=0.45.1"]
search = ["firecrawl-py>=4.12.0", "parallel-web>=0.3.3", "tldextract>=5.1.0"]
serpapi = ["google_search_results>=2.4.2"]
ocr = ["mistralai>=1.0.0"]
services = ["fastapi>=0.110.0", "redis>=5.0.2", "uvicorn>=0.27.1"]
solver = ["z3-solver>=4.12.6.0"]
qdrant = ["qdrant-client", "chonkie>=0.4.1", "tokenizers", "requests"]
# Aggregate extra. Intentionally omits `bitsandbytes` and `llama_cpp`,
# which are handled separately (see the comments on those extras above).
all = [
"symbolicai[hf]",
"symbolicai[wolframalpha]",
"symbolicai[whisper]",
"symbolicai[scrape]",
"symbolicai[search]",
"symbolicai[serpapi]",
"symbolicai[services]",
"symbolicai[solver]",
"symbolicai[qdrant]",
"symbolicai[lean]",
"symbolicai[ocr]"
]
# NOTE(review): this `dev` extra overlaps with [dependency-groups].dev but
# lists only pytest-asyncio -- confirm whether it is still needed or a leftover.
dev = [
"pytest-asyncio>=1.3.0",
]
# The package version is read at build time from the `symai.SYMAI_VERSION`
# attribute (this is what makes `dynamic = ["version"]` in [project] work).
[tool.setuptools.dynamic]
version = {attr = "symai.SYMAI_VERSION"}
# Non-Python data files to ship inside every packaged directory.
[tool.setuptools.package-data]
"*" = ["*.json", "*.md", "*.pytxt"]
# Package discovery: only the symai* packages go into the distribution;
# repo scaffolding (tests, examples, ...) is excluded from the wheel.
[tool.setuptools.packages.find]
include = ["symai*"]
exclude = ["tests", "examples", "notebooks", "outputs", "assets"]
# PEP 735 dependency groups -- development-only tools, installed by
# `uv sync` but never shipped with the package.
[dependency-groups]
dev = [
"ruff>=0.14.4",
"pytest>=8.3.1",
"pytest-asyncio>=1.3.0",
]
# Project links displayed on PyPI. Bare keys are used: quoting is unnecessary
# for names matching [A-Za-z0-9_-]+ and the key value is identical either way.
[project.urls]
Homepage = "https://extensity.ai"
GitHub = "https://github.com/ExtensityAI/symbolicai"

# Console entry points installed alongside the package.
[project.scripts]
symchat = "symai.chat:run"
symsh = "symai.shell:run"
sympkg = "symai.extended.packages.sympkg:run"
symdev = "symai.extended.packages.symdev:run"
symrun = "symai.extended.packages.symrun:run"
symconfig = "symai:display_config"
symserver = "symai:run_server"