serverdaun committed on
Commit
53ced79
·
1 Parent(s): 818d04a

remove versions from reqs

Browse files
Files changed (1) hide show
  1. requirements.txt +159 -542
requirements.txt CHANGED
@@ -1,542 +1,159 @@
1
- # This file was autogenerated by uv via the following command:
2
- # uv export --frozen --no-hashes -o requirements.txt
3
- aiofiles==24.1.0
4
- # via gradio
5
- aiohappyeyeballs==2.6.1
6
- # via aiohttp
7
- aiohttp==3.12.15
8
- # via
9
- # huggingface-hub
10
- # langchain-community
11
- # llama-index-core
12
- aiosignal==1.4.0
13
- # via aiohttp
14
- aiosqlite==0.21.0
15
- # via llama-index-core
16
- annotated-types==0.7.0
17
- # via pydantic
18
- anyio==4.10.0
19
- # via
20
- # gradio
21
- # httpx
22
- # openai
23
- # starlette
24
- attrs==25.3.0
25
- # via aiohttp
26
- audioop-lts==0.2.2 ; python_full_version >= '3.13'
27
- # via gradio
28
- banks==2.2.0
29
- # via llama-index-core
30
- beautifulsoup4==4.13.4
31
- # via llama-index-readers-file
32
- black==25.1.0
33
- # via rag-w-binary-quant
34
- brotli==1.1.0
35
- # via gradio
36
- certifi==2025.8.3
37
- # via
38
- # httpcore
39
- # httpx
40
- # llama-cloud
41
- # requests
42
- cffi==1.17.1 ; platform_python_implementation == 'PyPy'
43
- # via zstandard
44
- charset-normalizer==3.4.2
45
- # via requests
46
- click==8.2.1
47
- # via
48
- # black
49
- # llama-cloud-services
50
- # nltk
51
- # typer
52
- # uvicorn
53
- colorama==0.4.6
54
- # via
55
- # click
56
- # griffe
57
- # tqdm
58
- dataclasses-json==0.6.7
59
- # via
60
- # langchain-community
61
- # llama-index-core
62
- defusedxml==0.7.1
63
- # via llama-index-readers-file
64
- deprecated==1.2.18
65
- # via
66
- # banks
67
- # llama-index-core
68
- # llama-index-instrumentation
69
- dirtyjson==1.0.8
70
- # via llama-index-core
71
- distro==1.9.0
72
- # via openai
73
- docx2txt==0.9
74
- # via rag-w-binary-quant
75
- dotenv==0.9.9
76
- # via rag-w-binary-quant
77
- fastapi==0.116.1
78
- # via gradio
79
- ffmpy==0.6.1
80
- # via gradio
81
- filelock==3.18.0
82
- # via
83
- # huggingface-hub
84
- # torch
85
- # transformers
86
- filetype==1.2.0
87
- # via llama-index-core
88
- frozenlist==1.7.0
89
- # via
90
- # aiohttp
91
- # aiosignal
92
- fsspec==2025.7.0
93
- # via
94
- # gradio-client
95
- # huggingface-hub
96
- # llama-index-core
97
- # torch
98
- gradio==5.41.0
99
- # via rag-w-binary-quant
100
- gradio-client==1.11.0
101
- # via gradio
102
- greenlet==3.2.3
103
- # via sqlalchemy
104
- griffe==1.9.0
105
- # via banks
106
- groovy==0.1.2
107
- # via gradio
108
- grpcio==1.67.1
109
- # via pymilvus
110
- h11==0.16.0
111
- # via
112
- # httpcore
113
- # uvicorn
114
- hf-xet==1.1.5 ; platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'arm64' or platform_machine == 'x86_64'
115
- # via huggingface-hub
116
- httpcore==1.0.9
117
- # via httpx
118
- httpx==0.28.1
119
- # via
120
- # gradio
121
- # gradio-client
122
- # langsmith
123
- # llama-cloud
124
- # llama-index-core
125
- # openai
126
- # safehttpx
127
- httpx-sse==0.4.1
128
- # via langchain-community
129
- huggingface-hub==0.34.3
130
- # via
131
- # gradio
132
- # gradio-client
133
- # llama-index-embeddings-huggingface
134
- # sentence-transformers
135
- # tokenizers
136
- # transformers
137
- idna==3.10
138
- # via
139
- # anyio
140
- # httpx
141
- # requests
142
- # yarl
143
- isort==6.0.1
144
- # via rag-w-binary-quant
145
- jinja2==3.1.6
146
- # via
147
- # banks
148
- # gradio
149
- # torch
150
- jiter==0.10.0
151
- # via openai
152
- joblib==1.5.1
153
- # via
154
- # nltk
155
- # scikit-learn
156
- jsonpatch==1.33
157
- # via langchain-core
158
- jsonpointer==3.0.0
159
- # via jsonpatch
160
- langchain==0.3.27
161
- # via
162
- # langchain-community
163
- # rag-w-binary-quant
164
- langchain-community==0.3.27
165
- # via rag-w-binary-quant
166
- langchain-core==0.3.72
167
- # via
168
- # langchain
169
- # langchain-community
170
- # langchain-openai
171
- # langchain-text-splitters
172
- langchain-openai==0.3.28
173
- # via rag-w-binary-quant
174
- langchain-text-splitters==0.3.9
175
- # via langchain
176
- langsmith==0.4.10
177
- # via
178
- # langchain
179
- # langchain-community
180
- # langchain-core
181
- llama-cloud==0.1.35
182
- # via
183
- # llama-cloud-services
184
- # llama-index-indices-managed-llama-cloud
185
- llama-cloud-services==0.6.54
186
- # via llama-parse
187
- llama-index==0.13.0
188
- # via rag-w-binary-quant
189
- llama-index-cli==0.5.0
190
- # via llama-index
191
- llama-index-core==0.13.0
192
- # via
193
- # llama-cloud-services
194
- # llama-index
195
- # llama-index-cli
196
- # llama-index-embeddings-huggingface
197
- # llama-index-embeddings-openai
198
- # llama-index-indices-managed-llama-cloud
199
- # llama-index-llms-openai
200
- # llama-index-readers-file
201
- # llama-index-readers-llama-parse
202
- llama-index-embeddings-huggingface==0.6.0
203
- # via rag-w-binary-quant
204
- llama-index-embeddings-openai==0.5.0
205
- # via
206
- # llama-index
207
- # llama-index-cli
208
- llama-index-indices-managed-llama-cloud==0.9.0
209
- # via llama-index
210
- llama-index-instrumentation==0.4.0
211
- # via llama-index-workflows
212
- llama-index-llms-openai==0.5.0
213
- # via
214
- # llama-index
215
- # llama-index-cli
216
- llama-index-readers-file==0.5.0
217
- # via llama-index
218
- llama-index-readers-llama-parse==0.5.0
219
- # via llama-index
220
- llama-index-workflows==1.2.0
221
- # via llama-index-core
222
- llama-parse==0.6.54
223
- # via llama-index-readers-llama-parse
224
- markdown-it-py==3.0.0 ; sys_platform != 'emscripten'
225
- # via rich
226
- markupsafe==3.0.2
227
- # via
228
- # gradio
229
- # jinja2
230
- marshmallow==3.26.1
231
- # via dataclasses-json
232
- mdurl==0.1.2 ; sys_platform != 'emscripten'
233
- # via markdown-it-py
234
- milvus-lite==2.5.1 ; sys_platform != 'win32'
235
- # via pymilvus
236
- mpmath==1.3.0
237
- # via sympy
238
- multidict==6.6.3
239
- # via
240
- # aiohttp
241
- # yarl
242
- mypy-extensions==1.1.0
243
- # via
244
- # black
245
- # typing-inspect
246
- nest-asyncio==1.6.0
247
- # via llama-index-core
248
- networkx==3.5
249
- # via
250
- # llama-index-core
251
- # torch
252
- nltk==3.9.1
253
- # via
254
- # llama-index
255
- # llama-index-core
256
- numpy==2.3.2
257
- # via
258
- # gradio
259
- # langchain-community
260
- # llama-index-core
261
- # pandas
262
- # rag-w-binary-quant
263
- # scikit-learn
264
- # scipy
265
- # transformers
266
- nvidia-cublas-cu12==12.6.4.1 ; platform_machine == 'x86_64' and sys_platform == 'linux'
267
- # via
268
- # nvidia-cudnn-cu12
269
- # nvidia-cusolver-cu12
270
- # torch
271
- nvidia-cuda-cupti-cu12==12.6.80 ; platform_machine == 'x86_64' and sys_platform == 'linux'
272
- # via torch
273
- nvidia-cuda-nvrtc-cu12==12.6.77 ; platform_machine == 'x86_64' and sys_platform == 'linux'
274
- # via torch
275
- nvidia-cuda-runtime-cu12==12.6.77 ; platform_machine == 'x86_64' and sys_platform == 'linux'
276
- # via torch
277
- nvidia-cudnn-cu12==9.5.1.17 ; platform_machine == 'x86_64' and sys_platform == 'linux'
278
- # via torch
279
- nvidia-cufft-cu12==11.3.0.4 ; platform_machine == 'x86_64' and sys_platform == 'linux'
280
- # via torch
281
- nvidia-cufile-cu12==1.11.1.6 ; platform_machine == 'x86_64' and sys_platform == 'linux'
282
- # via torch
283
- nvidia-curand-cu12==10.3.7.77 ; platform_machine == 'x86_64' and sys_platform == 'linux'
284
- # via torch
285
- nvidia-cusolver-cu12==11.7.1.2 ; platform_machine == 'x86_64' and sys_platform == 'linux'
286
- # via torch
287
- nvidia-cusparse-cu12==12.5.4.2 ; platform_machine == 'x86_64' and sys_platform == 'linux'
288
- # via
289
- # nvidia-cusolver-cu12
290
- # torch
291
- nvidia-cusparselt-cu12==0.6.3 ; platform_machine == 'x86_64' and sys_platform == 'linux'
292
- # via torch
293
- nvidia-nccl-cu12==2.26.2 ; platform_machine == 'x86_64' and sys_platform == 'linux'
294
- # via torch
295
- nvidia-nvjitlink-cu12==12.6.85 ; platform_machine == 'x86_64' and sys_platform == 'linux'
296
- # via
297
- # nvidia-cufft-cu12
298
- # nvidia-cusolver-cu12
299
- # nvidia-cusparse-cu12
300
- # torch
301
- nvidia-nvtx-cu12==12.6.77 ; platform_machine == 'x86_64' and sys_platform == 'linux'
302
- # via torch
303
- openai==1.98.0
304
- # via
305
- # langchain-openai
306
- # llama-index-embeddings-openai
307
- # llama-index-llms-openai
308
- orjson==3.11.1
309
- # via
310
- # gradio
311
- # langsmith
312
- packaging==25.0
313
- # via
314
- # black
315
- # gradio
316
- # gradio-client
317
- # huggingface-hub
318
- # langchain-core
319
- # langsmith
320
- # marshmallow
321
- # transformers
322
- pandas==2.2.3
323
- # via
324
- # gradio
325
- # llama-index-readers-file
326
- # pymilvus
327
- pathspec==0.12.1
328
- # via black
329
- pillow==11.3.0
330
- # via
331
- # gradio
332
- # llama-index-core
333
- # sentence-transformers
334
- platformdirs==4.3.8
335
- # via
336
- # banks
337
- # black
338
- # llama-cloud-services
339
- # llama-index-core
340
- propcache==0.3.2
341
- # via
342
- # aiohttp
343
- # yarl
344
- protobuf==6.31.1
345
- # via pymilvus
346
- pycparser==2.22 ; platform_python_implementation == 'PyPy'
347
- # via cffi
348
- pydantic==2.11.7
349
- # via
350
- # banks
351
- # fastapi
352
- # gradio
353
- # langchain
354
- # langchain-core
355
- # langsmith
356
- # llama-cloud
357
- # llama-cloud-services
358
- # llama-index-core
359
- # llama-index-instrumentation
360
- # llama-index-workflows
361
- # openai
362
- # pydantic-settings
363
- pydantic-core==2.33.2
364
- # via pydantic
365
- pydantic-settings==2.10.1
366
- # via langchain-community
367
- pydub==0.25.1
368
- # via gradio
369
- pygments==2.19.2 ; sys_platform != 'emscripten'
370
- # via rich
371
- pymilvus==2.5.14
372
- # via rag-w-binary-quant
373
- pypdf==5.9.0
374
- # via llama-index-readers-file
375
- python-dateutil==2.9.0.post0
376
- # via pandas
377
- python-dotenv==1.1.1
378
- # via
379
- # dotenv
380
- # llama-cloud-services
381
- # pydantic-settings
382
- # pymilvus
383
- python-multipart==0.0.20
384
- # via gradio
385
- pytz==2025.2
386
- # via pandas
387
- pyyaml==6.0.2
388
- # via
389
- # gradio
390
- # huggingface-hub
391
- # langchain
392
- # langchain-community
393
- # langchain-core
394
- # llama-index-core
395
- # transformers
396
- regex==2025.7.34
397
- # via
398
- # nltk
399
- # tiktoken
400
- # transformers
401
- requests==2.32.4
402
- # via
403
- # huggingface-hub
404
- # langchain
405
- # langchain-community
406
- # langsmith
407
- # llama-index-core
408
- # requests-toolbelt
409
- # tiktoken
410
- # transformers
411
- requests-toolbelt==1.0.0
412
- # via langsmith
413
- rich==14.1.0 ; sys_platform != 'emscripten'
414
- # via typer
415
- ruff==0.12.7 ; sys_platform != 'emscripten'
416
- # via gradio
417
- safehttpx==0.1.6
418
- # via gradio
419
- safetensors==0.5.3
420
- # via transformers
421
- scikit-learn==1.7.1
422
- # via sentence-transformers
423
- scipy==1.16.1
424
- # via
425
- # scikit-learn
426
- # sentence-transformers
427
- semantic-version==2.10.0
428
- # via gradio
429
- sentence-transformers==5.0.0
430
- # via llama-index-embeddings-huggingface
431
- setuptools==80.9.0
432
- # via
433
- # llama-index-core
434
- # pymilvus
435
- # torch
436
- # triton
437
- shellingham==1.5.4 ; sys_platform != 'emscripten'
438
- # via typer
439
- six==1.17.0
440
- # via python-dateutil
441
- sniffio==1.3.1
442
- # via
443
- # anyio
444
- # openai
445
- soupsieve==2.7
446
- # via beautifulsoup4
447
- sqlalchemy==2.0.42
448
- # via
449
- # langchain
450
- # langchain-community
451
- # llama-index-core
452
- starlette==0.47.2
453
- # via
454
- # fastapi
455
- # gradio
456
- striprtf==0.0.26
457
- # via llama-index-readers-file
458
- sympy==1.14.0
459
- # via torch
460
- tenacity==9.1.2
461
- # via
462
- # langchain-community
463
- # langchain-core
464
- # llama-cloud-services
465
- # llama-index-core
466
- threadpoolctl==3.6.0
467
- # via scikit-learn
468
- tiktoken==0.9.0
469
- # via
470
- # langchain-openai
471
- # llama-index-core
472
- tokenizers==0.21.4
473
- # via transformers
474
- tomlkit==0.13.3
475
- # via gradio
476
- torch==2.7.1
477
- # via sentence-transformers
478
- tqdm==4.67.1
479
- # via
480
- # huggingface-hub
481
- # llama-index-core
482
- # milvus-lite
483
- # nltk
484
- # openai
485
- # sentence-transformers
486
- # transformers
487
- transformers==4.54.1
488
- # via sentence-transformers
489
- triton==3.3.1 ; platform_machine == 'x86_64' and sys_platform == 'linux'
490
- # via torch
491
- typer==0.16.0 ; sys_platform != 'emscripten'
492
- # via gradio
493
- typing-extensions==4.14.1
494
- # via
495
- # aiosignal
496
- # aiosqlite
497
- # anyio
498
- # beautifulsoup4
499
- # fastapi
500
- # gradio
501
- # gradio-client
502
- # huggingface-hub
503
- # langchain-core
504
- # llama-index-core
505
- # openai
506
- # pydantic
507
- # pydantic-core
508
- # sentence-transformers
509
- # sqlalchemy
510
- # starlette
511
- # torch
512
- # typer
513
- # typing-inspect
514
- # typing-inspection
515
- typing-inspect==0.9.0
516
- # via
517
- # dataclasses-json
518
- # llama-index-core
519
- typing-inspection==0.4.1
520
- # via
521
- # pydantic
522
- # pydantic-settings
523
- tzdata==2025.2
524
- # via pandas
525
- ujson==5.10.0
526
- # via pymilvus
527
- urllib3==2.5.0
528
- # via
529
- # gradio
530
- # requests
531
- uvicorn==0.35.0 ; sys_platform != 'emscripten'
532
- # via gradio
533
- websockets==15.0.1
534
- # via gradio-client
535
- wrapt==1.17.2
536
- # via
537
- # deprecated
538
- # llama-index-core
539
- yarl==1.20.1
540
- # via aiohttp
541
- zstandard==0.23.0
542
- # via langsmith
 
1
+ aiofiles
2
+ aiohappyeyeballs
3
+ aiohttp
4
+ aiosignal
5
+ aiosqlite
6
+ annotated-types
7
+ anyio
8
+ attrs
9
+ audioop-lts
10
+ banks
11
+ beautifulsoup4
12
+ black
13
+ brotli
14
+ certifi
15
+ cffi
16
+ charset-normalizer
17
+ click
18
+ colorama
19
+ dataclasses-json
20
+ defusedxml
21
+ deprecated
22
+ dirtyjson
23
+ distro
24
+ docx2txt
25
+ dotenv
26
+ fastapi
27
+ ffmpy
28
+ filelock
29
+ filetype
30
+ frozenlist
31
+ fsspec
32
+ gradio
33
+ gradio-client
34
+ greenlet
35
+ griffe
36
+ groovy
37
+ grpcio
38
+ h11
39
+ hf-xet
40
+ httpcore
41
+ httpx
42
+ httpx-sse
43
+ huggingface-hub
44
+ idna
45
+ isort
46
+ jinja2
47
+ jiter
48
+ joblib
49
+ jsonpatch
50
+ jsonpointer
51
+ langchain
52
+ langchain-community
53
+ langchain-core
54
+ langchain-openai
55
+ langchain-text-splitters
56
+ langsmith
57
+ llama-cloud
58
+ llama-cloud-services
59
+ llama-index
60
+ llama-index-cli
61
+ llama-index-core
62
+ llama-index-embeddings-huggingface
63
+ llama-index-embeddings-openai
64
+ llama-index-indices-managed-llama-cloud
65
+ llama-index-instrumentation
66
+ llama-index-llms-openai
67
+ llama-index-readers-file
68
+ llama-index-readers-llama-parse
69
+ llama-index-workflows
70
+ llama-parse
71
+ markdown-it-py
72
+ markupsafe
73
+ marshmallow
74
+ mdurl
75
+ milvus-lite
76
+ mpmath
77
+ multidict
78
+ mypy-extensions
79
+ nest-asyncio
80
+ networkx
81
+ nltk
82
+ numpy
83
+ nvidia-cublas-cu12
84
+ nvidia-cuda-cupti-cu12
85
+ nvidia-cuda-nvrtc-cu12
86
+ nvidia-cuda-runtime-cu12
87
+ nvidia-cudnn-cu12
88
+ nvidia-cufft-cu12
89
+ nvidia-cufile-cu12
90
+ nvidia-curand-cu12
91
+ nvidia-cusolver-cu12
92
+ nvidia-cusparse-cu12
93
+ nvidia-cusparselt-cu12
94
+ nvidia-nccl-cu12
95
+ nvidia-nvjitlink-cu12
96
+ nvidia-nvtx-cu12
97
+ openai
98
+ orjson
99
+ packaging
100
+ pandas
101
+ pathspec
102
+ pillow
103
+ platformdirs
104
+ propcache
105
+ protobuf
106
+ pycparser
107
+ pydantic
108
+ pydantic-core
109
+ pydantic-settings
110
+ pydub
111
+ pygments
112
+ pymilvus
113
+ pypdf
114
+ python-dateutil
115
+ python-dotenv
116
+ python-multipart
117
+ pytz
118
+ pyyaml
119
+ regex
120
+ requests
121
+ requests-toolbelt
122
+ rich
123
+ ruff
124
+ safehttpx
125
+ safetensors
126
+ scikit-learn
127
+ scipy
128
+ semantic-version
129
+ sentence-transformers
130
+ setuptools
131
+ shellingham
132
+ six
133
+ sniffio
134
+ soupsieve
135
+ sqlalchemy
136
+ starlette
137
+ striprtf
138
+ sympy
139
+ tenacity
140
+ threadpoolctl
141
+ tiktoken
142
+ tokenizers
143
+ tomlkit
144
+ torch
145
+ tqdm
146
+ transformers
147
+ triton
148
+ typer
149
+ typing-extensions
150
+ typing-inspect
151
+ typing-inspection
152
+ tzdata
153
+ ujson
154
+ urllib3
155
+ uvicorn
156
+ websockets
157
+ wrapt
158
+ yarl
159
+ zstandard