# syntax=docker/dockerfile:1

# Base image: slim Debian-based Python 3.10.
# NOTE(review): for fully reproducible builds, pin by digest,
# e.g. python:3.10-slim@sha256:<digest>.
FROM python:3.10-slim

# Suppress interactive debconf prompts during apt-get installs.
# Declared as a build-time ARG (not ENV) so the setting does not leak
# into the runtime environment of the final image.
ARG DEBIAN_FRONTEND=noninteractive

# OS-level toolchain and libraries needed to build ExecuTorch from source.
# --no-install-recommends keeps the image slim; the apt list cache is
# removed in the same layer so it never persists in the image.
# Packages are sorted alphabetically for diffability.
RUN apt-get update && apt-get install -y --no-install-recommends \
        build-essential \
        cmake \
        curl \
        git \
        git-lfs \
        libboost-all-dev \
        libomp-dev \
        libtorch-dev \
        python3-dev \
        wget \
    && rm -rf /var/lib/apt/lists/*

# Register the Git LFS filters globally so large files (e.g. model
# weights) are fetched on clone/pull instead of left as pointer stubs.
RUN git lfs install

# All subsequent build steps, clones, and the runtime CMD resolve
# relative to /content.
WORKDIR /content

# Fetch ExecuTorch pinned to a known-good commit (reproducible builds),
# then sync its submodules to the matching revisions.
# git -C avoids `cd` inside RUN (hadolint DL3003).
RUN git clone https://github.com/pytorch/executorch && \
    git -C executorch checkout dfbf6fd53546eb86e18f2e5cc693d70a82e3b03f && \
    git -C executorch submodule sync && \
    git -C executorch submodule update --init

# Python build prerequisites. torch is installed ahead of ExecuTorch's
# own requirements so its setup scripts can import it. --no-cache-dir
# keeps the pip wheel cache out of the image layer (hadolint DL3042).
# The requirements.txt check is best-effort: a missing file only logs.
# NOTE(review): torch is unpinned; pin a version for reproducible builds.
RUN python3 -m pip install --upgrade --no-cache-dir pip && \
    python3 -m pip install --no-cache-dir torch && \
    if [ -f "/content/executorch/requirements.txt" ]; then \
        python3 -m pip install --no-cache-dir -r /content/executorch/requirements.txt; \
    else \
        echo "No requirements.txt found, skipping..."; \
    fi

# Run ExecuTorch's own setup scripts: the top-level one (with pybind
# bindings) and the llama example's. Each script is best-effort — a
# missing or failing script only logs a warning so the image build
# continues.
# NOTE(review): the second `cd` hard-fails the build if the examples/
# models/llama path moves in a future checkout — confirm intent.
RUN cd /content/executorch && \
    if [ -f "install_requirements.sh" ]; then \
        bash ./install_requirements.sh --pybind || echo "Warning: Failed to execute install_requirements.sh"; \
    else \
        echo "install_requirements.sh not found, skipping..."; \
    fi && \
    cd /content/executorch/examples/models/llama && \
    if [ -f "install_requirements.sh" ]; then \
        bash ./install_requirements.sh || echo "Warning: Failed to execute llama/install_requirements.sh"; \
    else \
        echo "llama/install_requirements.sh not found, skipping..."; \
    fi

# Download the quantized Llama 3.2 1B Instruct model (SpinQuant INT4).
# `git lfs pull` materializes the actual weight files rather than LFS
# pointer stubs. git -C avoids `cd` inside RUN (hadolint DL3003).
RUN git clone https://huggingface.co/executorch-community/Llama-3.2-1B-Instruct-SpinQuant_INT4_EO8-ET /content/llama-model && \
    git -C /content/llama-model lfs pull

# Web-serving dependencies for the FastAPI app copied below.
# NOTE(review): unpinned versions; pin (e.g. fastapi==0.x.y) for
# reproducible builds.
RUN python3 -m pip install --no-cache-dir fastapi uvicorn pydantic

# Application code last: it changes most often, so the heavy earlier
# layers (toolchain, ExecuTorch setup, model weights) stay cached.
COPY app.py /content/app.py

# Documentation only (does not publish the port): the FastAPI app
# listens on 7860; publish with `docker run -p 7860:7860`.
EXPOSE 7860

# Exec-form CMD: uvicorn runs as PID 1 and receives SIGTERM directly on
# `docker stop`. "app:app" resolves against WORKDIR /content/app.py.
# NOTE(review): the container runs as root; consider adding a non-root
# USER after the root-requiring install steps.
CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]