# Aggregate setup entry points. These are command aliases, not files, so
# declare them phony to avoid collisions with same-named files on disk.
.PHONY: setup setup-runpod

## Full local setup: install deps, then run the data-prep pipeline.
setup: install preprocess augment

## Same pipeline, but with the RunPod-specific dependency install.
setup-runpod: install-runpod preprocess augment
.PHONY: install

## Install Python dependencies for a local (CPU) environment.
# CPU-pinned requirements are installed first so the generic requirements
# file resolves against the already-installed CPU packages.
install:
	pip3 install --no-cache-dir --break-system-packages -r requirements-cpu.txt
	pip3 install --no-cache-dir --break-system-packages -r requirements.txt
.PHONY: install-runpod

## Install dependencies on a RunPod GPU image.
install-runpod:
# --ignore-installed works around the image's distro-owned blinker
# package, which pip cannot cleanly uninstall.
	pip3 install --break-system-packages --ignore-installed blinker -r requirements.txt
# Force-reinstall torch/torchvision to versions known to work on the image.
	pip3 install --break-system-packages --force-reinstall "torch>=2.6.0" "torchvision>=0.21.0"
# NumPy 2.x is ABI-incompatible with some compiled deps — pin below 2.
	pip3 install --break-system-packages --force-reinstall "numpy<2"
# macOS Python interpreter; override if yours lives elsewhere, e.g.
#   make install-mac PYTHON_MAC=/opt/homebrew/bin/python3
PYTHON_MAC ?= /usr/local/bin/python3

.PHONY: install-mac

## Install dependencies using the macOS system/Homebrew python.
install-mac:
	$(PYTHON_MAC) -m pip install --break-system-packages -r requirements.txt
.PHONY: preprocess augment

## Run the dataset preprocessing step.
preprocess:
	python3 -m src.models.preprocess

## Run data augmentation. MAX_LENGTH defaults to 256; override with
##   make augment MAX_LENGTH=512
augment:
	python3 -m src.models.augment --max-length $(or $(MAX_LENGTH),256)
.PHONY: serialize serialize-marker serialize-qa-m serialize-qa-b serialize-all

## Serialize an arbitrary model. Requires MODEL_DIR, e.g.
##   make serialize MODEL_DIR=models/marker
serialize:
# Fail fast with a clear message instead of passing an empty --model-dir.
	$(if $(strip $(MODEL_DIR)),,$(error MODEL_DIR is required, e.g. make serialize MODEL_DIR=models/marker))
	python3 -m src.models.serialize --model-dir $(MODEL_DIR)

## Convenience wrappers for the three known model directories.
serialize-marker:
	python3 -m src.models.serialize --model-dir models/marker

serialize-qa-m:
	python3 -m src.models.serialize --model-dir models/qa_m

serialize-qa-b:
	python3 -m src.models.serialize --model-dir models/qa_b

## Serialize every model.
serialize-all: serialize-marker serialize-qa-m serialize-qa-b
.PHONY: inference inference-marker inference-qa-m inference-qa-b inference-all

## Run inference on an arbitrary model. Requires MODEL_DIR, DATA and OUTPUT:
##   make inference MODEL_DIR=models/marker DATA=data.json OUTPUT=out.json
inference:
# Fail fast with clear messages instead of passing empty CLI flags.
	$(if $(strip $(MODEL_DIR)),,$(error MODEL_DIR is required))
	$(if $(strip $(DATA)),,$(error DATA is required))
	$(if $(strip $(OUTPUT)),,$(error OUTPUT is required))
	python3 -m src.models.inference --model-dir $(MODEL_DIR) --data $(DATA) --output $(OUTPUT)

## Per-model wrappers; each still requires DATA=<input file>.
inference-marker:
	python3 -m src.models.inference --model-dir models/marker --data $(DATA) --output predictions_marker.json

inference-qa-m:
	python3 -m src.models.inference --model-dir models/qa_m --data $(DATA) --output predictions_qa_m.json

inference-qa-b:
	python3 -m src.models.inference --model-dir models/qa_b --data $(DATA) --output predictions_qa_b.json

## Run inference with every model against the same DATA.
inference-all: inference-marker inference-qa-m inference-qa-b
.PHONY: hf-upload hf-upload-marker hf-upload-qa-m hf-upload-qa-b hf-upload-fasttext hf-upload-all

## Upload model artifacts to the Hugging Face Hub.
# With MODE unset, the uploader runs with no --mode flag (its own default);
# otherwise the given mode is forwarded.
hf-upload:
	python3 -m src.models.hf_upload $(if $(MODE),--mode $(MODE))

## Per-mode wrappers.
hf-upload-marker:
	python3 -m src.models.hf_upload --mode marker

hf-upload-qa-m:
	python3 -m src.models.hf_upload --mode qa_m

hf-upload-qa-b:
	python3 -m src.models.hf_upload --mode qa_b

hf-upload-fasttext:
	python3 -m src.models.hf_upload --mode fasttext

## Upload every artifact type.
hf-upload-all: hf-upload-marker hf-upload-qa-m hf-upload-qa-b hf-upload-fasttext
.PHONY: hf-download hf-download-marker hf-download-qa-m hf-download-qa-b hf-download-fasttext hf-download-all

## Download model artifacts from the Hugging Face Hub.
# With MODE unset, the downloader runs with no --mode flag (its own
# default); otherwise the given mode is forwarded.
hf-download:
	python3 -m src.models.hf_download $(if $(MODE),--mode $(MODE))

## Per-mode wrappers.
hf-download-marker:
	python3 -m src.models.hf_download --mode marker

hf-download-qa-m:
	python3 -m src.models.hf_download --mode qa_m

hf-download-qa-b:
	python3 -m src.models.hf_download --mode qa_b

hf-download-fasttext:
	python3 -m src.models.hf_download --mode fasttext

## Download every artifact type.
hf-download-all: hf-download-marker hf-download-qa-m hf-download-qa-b hf-download-fasttext
.PHONY: train-fasttext start-be start-fe

## Train the fastText model.
train-fasttext:
	python3 -m src.models.fasttext

## Start the ASGI backend (app:app) via uvicorn in dev mode (auto-reload).
start-be:
	uvicorn app:app --host 0.0.0.0 --port 8000 --reload

## Start the Streamlit frontend.
start-fe:
	streamlit run main.py