Mario Faúndez Vidal committed on
Commit
65dc75d
·
1 Parent(s): f1f6f45

feat: add dependency management commands to Makefile

Browse files

- Add 'make requirements' to generate requirements.txt from uv.lock
- Add 'make update' to upgrade all dependencies
- Add 'make upgrade PKG=name' to upgrade specific package
- Update requirements.txt with complete dependency tree from uv
- Update uv.lock with latest package resolutions

Files changed (3) hide show
  1. Makefile +14 -1
  2. requirements.txt +481 -4
  3. uv.lock +0 -0
Makefile CHANGED
@@ -1,4 +1,4 @@
1
- .PHONY: help install dev clean test format lint status run notebook docker-build docker-run
2
 
3
  IMAGE_NAME := classify-text-with-bert-hate-speech
4
  IMAGE_TAG := local
@@ -13,6 +13,8 @@ help:
13
  @echo ""
14
  @echo " make install - Install production dependencies with uv"
15
  @echo " make dev - Install development dependencies with uv"
 
 
16
  @echo " make run - Run the application (app.py)"
17
  @echo " make notebook - Launch jupyter notebook/lab"
18
  @echo " make test - Run tests (requires pytest)"
@@ -34,6 +36,17 @@ dev:
34
  uv sync --dev
35
  @echo "✅ Development dependencies installed successfully!"
36
 
 
 
 
 
 
 
 
 
 
 
 
37
  run:
38
  @echo "Running app.py..."
39
  uv run python app.py
 
1
+ .PHONY: help install dev clean test format lint status run notebook docker-build docker-run requirements update
2
 
3
  IMAGE_NAME := classify-text-with-bert-hate-speech
4
  IMAGE_TAG := local
 
13
  @echo ""
14
  @echo " make install - Install production dependencies with uv"
15
  @echo " make dev - Install development dependencies with uv"
16
+ @echo " make requirements- Generate requirements.txt from uv.lock"
17
+ @echo " make update - Update all dependencies to latest versions"
18
  @echo " make run - Run the application (app.py)"
19
  @echo " make notebook - Launch jupyter notebook/lab"
20
  @echo " make test - Run tests (requires pytest)"
 
36
  uv sync --dev
37
  @echo "✅ Development dependencies installed successfully!"
38
 
39
+ requirements: update
40
+ @echo "📝 Generating requirements.txt from uv.lock..."
41
+ uv pip compile pyproject.toml -o requirements.txt
42
+ @echo "✅ requirements.txt generated successfully!"
43
+
44
+ update: dev
45
+ @echo "🔄 Updating all dependencies to latest versions..."
46
+ uv lock --upgrade
47
+ @echo "✅ Dependencies updated! Run 'make install' or 'make dev' to apply changes."
48
+ @echo "💡 Tip: Run 'make requirements' to regenerate requirements.txt"
49
+
50
  run:
51
  @echo "Running app.py..."
52
  uv run python app.py
requirements.txt CHANGED
@@ -1,6 +1,483 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  gradio==3.20.1
2
- tensorflow==2.15.0
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
3
  tensorflow-hub==0.15.0
4
- tensorflow-text==2.15.0
5
- tf-models-official==2.15.0
6
- protobuf==3.20.3
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # This file was autogenerated by uv via the following command:
2
+ # uv pip compile pyproject.toml -o requirements.txt
3
+ absl-py==1.4.0
4
+ # via
5
+ # dm-tree
6
+ # etils
7
+ # keras
8
+ # tensorboard
9
+ # tensorflow
10
+ # tensorflow-datasets
11
+ # tensorflow-metadata
12
+ # tensorflow-model-optimization
13
+ # tf-slim
14
+ ai-edge-litert==2.0.2
15
+ # via tf-models-official
16
+ aiofiles==25.1.0
17
+ # via gradio
18
+ aiohappyeyeballs==2.6.1
19
+ # via aiohttp
20
+ aiohttp==3.13.0
21
+ # via gradio
22
+ aiosignal==1.4.0
23
+ # via aiohttp
24
+ altair==5.5.0
25
+ # via gradio
26
+ annotated-types==0.7.0
27
+ # via pydantic
28
+ anyio==4.11.0
29
+ # via
30
+ # httpx
31
+ # starlette
32
+ astunparse==1.6.3
33
+ # via tensorflow
34
+ attrs==25.4.0
35
+ # via
36
+ # aiohttp
37
+ # dm-tree
38
+ # jsonschema
39
+ # referencing
40
+ backports-strenum==1.3.1
41
+ # via ai-edge-litert
42
+ bleach==6.2.0
43
+ # via kaggle
44
+ cachetools==6.2.0
45
+ # via google-auth
46
+ certifi==2025.10.5
47
+ # via
48
+ # httpcore
49
+ # httpx
50
+ # kaggle
51
+ # requests
52
+ charset-normalizer==3.4.3
53
+ # via
54
+ # kaggle
55
+ # requests
56
+ click==8.3.0
57
+ # via uvicorn
58
+ colorama==0.4.6
59
+ # via sacrebleu
60
+ contourpy==1.3.3
61
+ # via matplotlib
62
+ cycler==0.12.1
63
+ # via matplotlib
64
+ cython==3.1.4
65
+ # via tf-models-official
66
+ dm-tree==0.1.9
67
+ # via
68
+ # tensorflow-datasets
69
+ # tensorflow-model-optimization
70
+ docstring-parser==0.17.0
71
+ # via simple-parsing
72
+ einops==0.8.1
73
+ # via etils
74
+ etils==1.13.0
75
+ # via tensorflow-datasets
76
+ fastapi==0.118.3
77
+ # via gradio
78
+ ffmpy==0.6.2
79
+ # via gradio
80
+ flatbuffers==25.9.23
81
+ # via
82
+ # ai-edge-litert
83
+ # tensorflow
84
+ fonttools==4.60.1
85
+ # via matplotlib
86
+ frozenlist==1.8.0
87
+ # via
88
+ # aiohttp
89
+ # aiosignal
90
+ fsspec==2025.9.0
91
+ # via
92
+ # etils
93
+ # gradio
94
+ gast==0.6.0
95
+ # via tensorflow
96
+ gin-config==0.5.0
97
+ # via tf-models-official
98
+ google-api-core==2.26.0
99
+ # via google-api-python-client
100
+ google-api-python-client==2.184.0
101
+ # via tf-models-official
102
+ google-auth==2.41.1
103
+ # via
104
+ # google-api-core
105
+ # google-api-python-client
106
+ # google-auth-httplib2
107
+ google-auth-httplib2==0.2.0
108
+ # via google-api-python-client
109
+ google-pasta==0.2.0
110
+ # via tensorflow
111
+ googleapis-common-protos==1.70.0
112
+ # via
113
+ # google-api-core
114
+ # tensorflow-metadata
115
  gradio==3.20.1
116
+ # via classify-text-with-bert-hate-speech (pyproject.toml)
117
+ grpcio==1.75.1
118
+ # via
119
+ # tensorboard
120
+ # tensorflow
121
+ h11==0.16.0
122
+ # via
123
+ # httpcore
124
+ # uvicorn
125
+ h5py==3.14.0
126
+ # via
127
+ # keras
128
+ # tensorflow
129
+ httpcore==1.0.9
130
+ # via httpx
131
+ httplib2==0.31.0
132
+ # via
133
+ # google-api-python-client
134
+ # google-auth-httplib2
135
+ # oauth2client
136
+ httpx==0.28.1
137
+ # via gradio
138
+ idna==3.10
139
+ # via
140
+ # anyio
141
+ # httpx
142
+ # kaggle
143
+ # requests
144
+ # yarl
145
+ immutabledict==4.2.1
146
+ # via
147
+ # tensorflow-datasets
148
+ # tf-models-official
149
+ importlib-resources==6.5.2
150
+ # via etils
151
+ jinja2==3.1.6
152
+ # via
153
+ # altair
154
+ # gradio
155
+ joblib==1.5.2
156
+ # via scikit-learn
157
+ jsonschema==4.25.1
158
+ # via altair
159
+ jsonschema-specifications==2025.9.1
160
+ # via jsonschema
161
+ kaggle==1.7.4.5
162
+ # via tf-models-official
163
+ keras==3.11.3
164
+ # via tensorflow
165
+ kiwisolver==1.4.9
166
+ # via matplotlib
167
+ libclang==18.1.1
168
+ # via tensorflow
169
+ linkify-it-py==2.0.3
170
+ # via markdown-it-py
171
+ lxml==6.0.2
172
+ # via sacrebleu
173
+ markdown==3.9
174
+ # via tensorboard
175
+ markdown-it-py==2.2.0
176
+ # via
177
+ # gradio
178
+ # mdit-py-plugins
179
+ # rich
180
+ markupsafe==3.0.3
181
+ # via
182
+ # gradio
183
+ # jinja2
184
+ # werkzeug
185
+ matplotlib==3.10.7
186
+ # via
187
+ # gradio
188
+ # tf-models-official
189
+ mdit-py-plugins==0.3.3
190
+ # via gradio
191
+ mdurl==0.1.2
192
+ # via markdown-it-py
193
+ ml-dtypes==0.5.3
194
+ # via
195
+ # keras
196
+ # tensorflow
197
+ multidict==6.7.0
198
+ # via
199
+ # aiohttp
200
+ # yarl
201
+ namex==0.1.0
202
+ # via keras
203
+ narwhals==2.7.0
204
+ # via altair
205
+ numpy==1.26.4
206
+ # via
207
+ # ai-edge-litert
208
+ # contourpy
209
+ # dm-tree
210
+ # etils
211
+ # gradio
212
+ # h5py
213
+ # keras
214
+ # matplotlib
215
+ # ml-dtypes
216
+ # opencv-python-headless
217
+ # pandas
218
+ # pycocotools
219
+ # sacrebleu
220
+ # scikit-learn
221
+ # scipy
222
+ # seqeval
223
+ # tensorboard
224
+ # tensorflow
225
+ # tensorflow-datasets
226
+ # tensorflow-hub
227
+ # tensorflow-model-optimization
228
+ # tf-models-official
229
+ oauth2client==4.1.3
230
+ # via tf-models-official
231
+ opencv-python-headless==4.11.0.86
232
+ # via tf-models-official
233
+ opt-einsum==3.4.0
234
+ # via tensorflow
235
+ optree==0.17.0
236
+ # via keras
237
+ orjson==3.11.3
238
+ # via gradio
239
+ packaging==25.0
240
+ # via
241
+ # altair
242
+ # keras
243
+ # matplotlib
244
+ # tensorboard
245
+ # tensorflow
246
+ pandas==2.3.3
247
+ # via
248
+ # gradio
249
+ # tf-models-official
250
+ pillow==11.3.0
251
+ # via
252
+ # gradio
253
+ # matplotlib
254
+ # tf-models-official
255
+ portalocker==3.2.0
256
+ # via sacrebleu
257
+ promise==2.3
258
+ # via tensorflow-datasets
259
+ propcache==0.4.1
260
+ # via
261
+ # aiohttp
262
+ # yarl
263
+ proto-plus==1.26.1
264
+ # via google-api-core
265
+ protobuf==5.29.5
266
+ # via
267
+ # google-api-core
268
+ # googleapis-common-protos
269
+ # kaggle
270
+ # proto-plus
271
+ # tensorboard
272
+ # tensorflow
273
+ # tensorflow-datasets
274
+ # tensorflow-hub
275
+ # tensorflow-metadata
276
+ psutil==7.1.0
277
+ # via
278
+ # tensorflow-datasets
279
+ # tf-models-official
280
+ py-cpuinfo==9.0.0
281
+ # via tf-models-official
282
+ pyarrow==21.0.0
283
+ # via tensorflow-datasets
284
+ pyasn1==0.6.1
285
+ # via
286
+ # oauth2client
287
+ # pyasn1-modules
288
+ # rsa
289
+ pyasn1-modules==0.4.2
290
+ # via
291
+ # google-auth
292
+ # oauth2client
293
+ pycocotools==2.0.10
294
+ # via tf-models-official
295
+ pycryptodome==3.23.0
296
+ # via gradio
297
+ pydantic==2.12.0
298
+ # via
299
+ # fastapi
300
+ # gradio
301
+ pydantic-core==2.41.1
302
+ # via pydantic
303
+ pydub==0.25.1
304
+ # via gradio
305
+ pygments==2.19.2
306
+ # via rich
307
+ pyparsing==3.2.5
308
+ # via
309
+ # httplib2
310
+ # matplotlib
311
+ python-dateutil==2.9.0.post0
312
+ # via
313
+ # kaggle
314
+ # matplotlib
315
+ # pandas
316
+ python-multipart==0.0.20
317
+ # via gradio
318
+ python-slugify==8.0.4
319
+ # via kaggle
320
+ pytz==2025.2
321
+ # via pandas
322
+ pyyaml==6.0.3
323
+ # via
324
+ # gradio
325
+ # tf-models-official
326
+ referencing==0.36.2
327
+ # via
328
+ # jsonschema
329
+ # jsonschema-specifications
330
+ regex==2025.9.18
331
+ # via sacrebleu
332
+ requests==2.32.5
333
+ # via
334
+ # google-api-core
335
+ # gradio
336
+ # kaggle
337
+ # tensorflow
338
+ # tensorflow-datasets
339
+ rich==14.2.0
340
+ # via keras
341
+ rpds-py==0.27.1
342
+ # via
343
+ # jsonschema
344
+ # referencing
345
+ rsa==4.9.1
346
+ # via
347
+ # google-auth
348
+ # oauth2client
349
+ sacrebleu==2.5.1
350
+ # via tf-models-official
351
+ scikit-learn==1.7.2
352
+ # via seqeval
353
+ scipy==1.16.2
354
+ # via
355
+ # scikit-learn
356
+ # tf-models-official
357
+ sentencepiece==0.2.1
358
+ # via tf-models-official
359
+ seqeval==1.2.2
360
+ # via tf-models-official
361
+ setuptools==80.9.0
362
+ # via
363
+ # kaggle
364
+ # tensorboard
365
+ # tensorflow
366
+ simple-parsing==0.1.7
367
+ # via tensorflow-datasets
368
+ six==1.17.0
369
+ # via
370
+ # astunparse
371
+ # google-pasta
372
+ # kaggle
373
+ # oauth2client
374
+ # promise
375
+ # python-dateutil
376
+ # tensorboard
377
+ # tensorflow
378
+ # tensorflow-model-optimization
379
+ # tf-models-official
380
+ sniffio==1.3.1
381
+ # via anyio
382
+ starlette==0.48.0
383
+ # via fastapi
384
+ tabulate==0.9.0
385
+ # via sacrebleu
386
+ tensorboard==2.19.0
387
+ # via tensorflow
388
+ tensorboard-data-server==0.7.2
389
+ # via tensorboard
390
+ tensorflow==2.19.1
391
+ # via
392
+ # tensorflow-text
393
+ # tf-keras
394
+ # tf-models-official
395
+ tensorflow-datasets==4.9.9
396
+ # via tf-models-official
397
  tensorflow-hub==0.15.0
398
+ # via tf-models-official
399
+ tensorflow-io-gcs-filesystem==0.37.1
400
+ # via tensorflow
401
+ tensorflow-metadata==1.17.2
402
+ # via tensorflow-datasets
403
+ tensorflow-model-optimization==0.8.0
404
+ # via tf-models-official
405
+ tensorflow-text==2.19.0
406
+ # via
407
+ # classify-text-with-bert-hate-speech (pyproject.toml)
408
+ # tf-models-official
409
+ termcolor==3.1.0
410
+ # via
411
+ # tensorflow
412
+ # tensorflow-datasets
413
+ text-unidecode==1.3
414
+ # via
415
+ # kaggle
416
+ # python-slugify
417
+ tf-keras==2.19.0
418
+ # via tf-models-official
419
+ tf-models-official==2.19.1
420
+ # via classify-text-with-bert-hate-speech (pyproject.toml)
421
+ tf-slim==1.1.0
422
+ # via tf-models-official
423
+ threadpoolctl==3.6.0
424
+ # via scikit-learn
425
+ toml==0.10.2
426
+ # via tensorflow-datasets
427
+ tqdm==4.67.1
428
+ # via
429
+ # ai-edge-litert
430
+ # etils
431
+ # kaggle
432
+ # tensorflow-datasets
433
+ typing-extensions==4.15.0
434
+ # via
435
+ # ai-edge-litert
436
+ # aiosignal
437
+ # altair
438
+ # anyio
439
+ # etils
440
+ # fastapi
441
+ # gradio
442
+ # grpcio
443
+ # optree
444
+ # pydantic
445
+ # pydantic-core
446
+ # referencing
447
+ # simple-parsing
448
+ # starlette
449
+ # tensorflow
450
+ # typing-inspection
451
+ typing-inspection==0.4.2
452
+ # via pydantic
453
+ tzdata==2025.2
454
+ # via pandas
455
+ uc-micro-py==1.0.3
456
+ # via linkify-it-py
457
+ uritemplate==4.2.0
458
+ # via google-api-python-client
459
+ urllib3==2.5.0
460
+ # via
461
+ # kaggle
462
+ # requests
463
+ uvicorn==0.37.0
464
+ # via gradio
465
+ webencodings==0.5.1
466
+ # via
467
+ # bleach
468
+ # kaggle
469
+ websockets==15.0.1
470
+ # via gradio
471
+ werkzeug==3.1.3
472
+ # via tensorboard
473
+ wheel==0.45.1
474
+ # via astunparse
475
+ wrapt==1.17.3
476
+ # via
477
+ # dm-tree
478
+ # tensorflow
479
+ # tensorflow-datasets
480
+ yarl==1.22.0
481
+ # via aiohttp
482
+ zipp==3.23.0
483
+ # via etils
uv.lock CHANGED
The diff for this file is too large to render. See raw diff