Dheeraj-13 committed on
Commit
683746a
·
1 Parent(s): 4b47d47

Switch to loose requirements to fix Python version mismatch

Browse files
Files changed (1) hide show
  1. requirements.txt +15 -313
requirements.txt CHANGED
@@ -1,315 +1,17 @@
1
- # This file was autogenerated by uv via the following command:
2
- # uv export --no-hashes --format requirements-txt
3
- accelerate==1.12.0
4
- # via rag-knowledge-assistant
5
- aiofiles==24.1.0
6
- annotated-doc==0.0.4
7
- # via fastapi
8
- annotated-types==0.7.0
9
- # via pydantic
10
- anyio==4.12.0
11
- # via
12
- # httpx
13
- # openai
14
- # starlette
15
- audioop-lts==0.2.2 ; python_full_version >= '3.13'
16
- backoff==2.2.1
17
- # via langfuse
18
- beautifulsoup4==4.14.3
19
- # via rag-knowledge-assistant
20
- brotli==1.2.0
21
- certifi==2025.11.12
22
- # via
23
- # httpcore
24
- # httpx
25
- # requests
26
- charset-normalizer==3.4.4
27
- # via requests
28
- click==8.3.1
29
- # via
30
- # typer
31
- # uvicorn
32
- colorama==0.4.6 ; sys_platform == 'win32'
33
- # via
34
- # click
35
- # pytest
36
- # tqdm
37
- distro==1.9.0
38
- # via openai
39
- faiss-cpu==1.13.1
40
- # via rag-knowledge-assistant
41
- fastapi==0.124.4
42
- ffmpy==1.0.0
43
- filelock==3.20.1
44
- # via
45
- # huggingface-hub
46
- # torch
47
- # transformers
48
- fsspec==2025.12.0
49
- # via
50
- # huggingface-hub
51
- # torch
52
- googleapis-common-protos==1.72.0
53
- # via opentelemetry-exporter-otlp-proto-http
54
- # via rag-knowledge-assistant
55
- groovy==0.1.2
56
- h11==0.16.0
57
- # via
58
- # httpcore
59
- # uvicorn
60
- hf-xet==1.2.0 ; platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'arm64' or platform_machine == 'x86_64'
61
- # via huggingface-hub
62
- httpcore==1.0.9
63
- # via httpx
64
- httpx==0.28.1
65
- # via
66
- # langfuse
67
- # openai
68
- # safehttpx
69
- huggingface-hub==0.36.0
70
- # via
71
- # accelerate
72
- # sentence-transformers
73
- # tokenizers
74
- # transformers
75
- idna==3.11
76
- # via
77
- # anyio
78
- # httpx
79
- # requests
80
- importlib-metadata==8.7.0
81
- # via opentelemetry-api
82
- iniconfig==2.3.0
83
- # via pytest
84
- jinja2==3.1.6
85
- # via
86
- # torch
87
- jiter==0.12.0
88
- # via openai
89
- joblib==1.5.3
90
- # via scikit-learn
91
- langfuse==3.11.0
92
- # via rag-knowledge-assistant
93
- markdown-it-py==4.0.0
94
- # via rich
95
- markupsafe==3.0.3
96
- # via
97
- # jinja2
98
- mdurl==0.1.2
99
- # via markdown-it-py
100
- mpmath==1.3.0
101
- # via sympy
102
- networkx==3.2.1
103
- # via torch
104
- numpy==2.3.5
105
- # via
106
- # accelerate
107
- # faiss-cpu
108
- # pandas
109
- # rag-knowledge-assistant
110
- # scikit-learn
111
- # scipy
112
- # transformers
113
- nvidia-cublas-cu12==12.8.4.1 ; platform_machine == 'x86_64' and sys_platform == 'linux'
114
- # via
115
- # nvidia-cudnn-cu12
116
- # nvidia-cusolver-cu12
117
- # torch
118
- nvidia-cuda-cupti-cu12==12.8.90 ; platform_machine == 'x86_64' and sys_platform == 'linux'
119
- # via torch
120
- nvidia-cuda-nvrtc-cu12==12.8.93 ; platform_machine == 'x86_64' and sys_platform == 'linux'
121
- # via torch
122
- nvidia-cuda-runtime-cu12==12.8.90 ; platform_machine == 'x86_64' and sys_platform == 'linux'
123
- # via torch
124
- nvidia-cudnn-cu12==9.10.2.21 ; platform_machine == 'x86_64' and sys_platform == 'linux'
125
- # via torch
126
- nvidia-cufft-cu12==11.3.3.83 ; platform_machine == 'x86_64' and sys_platform == 'linux'
127
- # via torch
128
- nvidia-cufile-cu12==1.13.1.3 ; platform_machine == 'x86_64' and sys_platform == 'linux'
129
- # via torch
130
- nvidia-curand-cu12==10.3.9.90 ; platform_machine == 'x86_64' and sys_platform == 'linux'
131
- # via torch
132
- nvidia-cusolver-cu12==11.7.3.90 ; platform_machine == 'x86_64' and sys_platform == 'linux'
133
- # via torch
134
- nvidia-cusparse-cu12==12.5.8.93 ; platform_machine == 'x86_64' and sys_platform == 'linux'
135
- # via
136
- # nvidia-cusolver-cu12
137
- # torch
138
- nvidia-cusparselt-cu12==0.7.1 ; platform_machine == 'x86_64' and sys_platform == 'linux'
139
- # via torch
140
- nvidia-nccl-cu12==2.27.5 ; platform_machine == 'x86_64' and sys_platform == 'linux'
141
- # via torch
142
- nvidia-nvjitlink-cu12==12.8.93 ; platform_machine == 'x86_64' and sys_platform == 'linux'
143
- # via
144
- # nvidia-cufft-cu12
145
- # nvidia-cusolver-cu12
146
- # nvidia-cusparse-cu12
147
- # torch
148
- nvidia-nvshmem-cu12==3.3.20 ; platform_machine == 'x86_64' and sys_platform == 'linux'
149
- # via torch
150
- nvidia-nvtx-cu12==12.8.90 ; platform_machine == 'x86_64' and sys_platform == 'linux'
151
- # via torch
152
- openai==2.13.0
153
- # via
154
- # langfuse
155
- # rag-knowledge-assistant
156
- opentelemetry-api==1.39.1
157
- # via
158
- # langfuse
159
- # opentelemetry-exporter-otlp-proto-http
160
- # opentelemetry-sdk
161
- # opentelemetry-semantic-conventions
162
- opentelemetry-exporter-otlp-proto-common==1.39.1
163
- # via opentelemetry-exporter-otlp-proto-http
164
- opentelemetry-exporter-otlp-proto-http==1.39.1
165
- # via langfuse
166
- opentelemetry-proto==1.39.1
167
- # via
168
- # opentelemetry-exporter-otlp-proto-common
169
- # opentelemetry-exporter-otlp-proto-http
170
- opentelemetry-sdk==1.39.1
171
- # via
172
- # langfuse
173
- # opentelemetry-exporter-otlp-proto-http
174
- opentelemetry-semantic-conventions==0.60b1
175
- # via opentelemetry-sdk
176
- orjson==3.11.5
177
- packaging==25.0
178
- # via
179
- # accelerate
180
- # faiss-cpu
181
- # huggingface-hub
182
- # langfuse
183
- # pytest
184
- # transformers
185
- pandas==2.3.3
186
- pillow==12.0.0
187
- pluggy==1.6.0
188
- # via pytest
189
- protobuf==6.33.2
190
- # via
191
- # googleapis-common-protos
192
- # opentelemetry-proto
193
- psutil==7.1.3
194
- # via accelerate
195
- pydantic==2.12.4
196
- # via
197
- # fastapi
198
- # langfuse
199
- # openai
200
- pydantic-core==2.41.5
201
- # via pydantic
202
- pydub==0.25.1
203
- pygments==2.19.2
204
- # via
205
- # pytest
206
- # rich
207
- pypdf==6.4.2
208
- # via rag-knowledge-assistant
209
- pytest==9.0.2
210
- python-dateutil==2.9.0.post0
211
- # via pandas
212
- python-dotenv==1.2.1
213
- # via rag-knowledge-assistant
214
- python-multipart==0.0.21
215
- pytz==2025.2
216
- # via pandas
217
- pyyaml==6.0.3
218
- # via
219
- # accelerate
220
- # huggingface-hub
221
- # transformers
222
- regex==2025.11.3
223
- # via transformers
224
- requests==2.32.5
225
- # via
226
- # huggingface-hub
227
- # langfuse
228
- # opentelemetry-exporter-otlp-proto-http
229
- # transformers
230
- rich==14.2.0
231
- # via typer
232
- ruff==0.14.9
233
- safehttpx==0.1.7
234
- safetensors==0.7.0
235
- # via
236
- # accelerate
237
- # transformers
238
- scikit-learn==1.8.0
239
- # via sentence-transformers
240
- scipy==1.16.3
241
- # via
242
- # scikit-learn
243
- # sentence-transformers
244
- semantic-version==2.10.0
245
- sentence-transformers==5.2.0
246
- # via rag-knowledge-assistant
247
- setuptools==80.9.0 ; python_full_version >= '3.12'
248
- # via torch
249
- shellingham==1.5.4
250
- # via typer
251
- six==1.17.0
252
- # via python-dateutil
253
- sniffio==1.3.1
254
- # via openai
255
- soupsieve==2.8
256
- # via beautifulsoup4
257
- starlette==0.50.0
258
- # via
259
- # fastapi
260
- sympy==1.14.0
261
- # via torch
262
- threadpoolctl==3.6.0
263
- # via scikit-learn
264
- tokenizers==0.22.1
265
- # via transformers
266
- tomlkit==0.13.3
267
  torch==2.4.0
268
- # via
269
- # accelerate
270
- # rag-knowledge-assistant
271
- # sentence-transformers
272
- tqdm==4.67.1
273
- # via
274
- # huggingface-hub
275
- # openai
276
- # sentence-transformers
277
- # transformers
278
- transformers==4.57.3
279
- # via
280
- # rag-knowledge-assistant
281
- # sentence-transformers
282
- triton==3.5.1 ; platform_machine == 'x86_64' and sys_platform == 'linux'
283
- # via torch
284
- typer==0.20.0
285
- typing-extensions==4.15.0
286
- # via
287
- # anyio
288
- # beautifulsoup4
289
- # fastapi
290
- # huggingface-hub
291
- # openai
292
- # opentelemetry-api
293
- # opentelemetry-exporter-otlp-proto-http
294
- # opentelemetry-sdk
295
- # opentelemetry-semantic-conventions
296
- # pydantic
297
- # pydantic-core
298
- # sentence-transformers
299
- # starlette
300
- # torch
301
- # typer
302
- # typing-inspection
303
- typing-inspection==0.4.2
304
- # via pydantic
305
- tzdata==2025.3
306
- # via pandas
307
- urllib3==2.6.2
308
- # via requests
309
- uvicorn==0.38.0
310
- wrapt==1.17.3
311
- # via langfuse
312
- zipp==3.23.0
313
- # via importlib-metadata
314
  spaces
315
- # Reset build trigger
 
 
 
 
 
 
 
 
1
+ transformers>=4.40.0
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2
  torch==2.4.0
3
+ accelerate
4
+ sentence-transformers
5
+ faiss-cpu
6
+ openai
7
+ langfuse
8
+ python-dotenv
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
9
  spaces
10
+ pypdf
11
+ beautifulsoup4
12
+ numpy<2.3.0
13
+ scikit-learn
14
+ pandas
15
+ networkx<3.5
16
+ # Add any other required root dependencies here if one was missed; this list covers the common RAG stack.
17
+ # Leaving versions unpinned lets pip resolve them, avoiding Python 3.10 vs 3.11 conflicts.