# Source: InfosysResponsibleAiToolKit — Hugging Face Space
# Commit: a1ca9d7 "Add .whl and punkt.zip with LFS"
# syntax=docker/dockerfile:1
FROM python:3.10

# Redirect Hugging Face caches to /tmp (writable on Spaces).
# TRANSFORMERS_CACHE is deprecated in favor of HF_HOME but kept for
# compatibility with older transformers versions.
ENV TRANSFORMERS_CACHE=/tmp/transformers_cache \
    HF_HOME=/tmp/huggingface_cache

# System dependencies: update + install + cleanup in ONE layer so the apt
# lists never persist in the image; --no-install-recommends keeps it lean.
RUN apt-get update && apt-get install -y --no-install-recommends \
        git \
        git-lfs \
        wget \
    && git lfs install \
    && rm -rf /var/lib/apt/lists/*

# Non-root user with the fixed UID 1000 expected by Hugging Face Spaces.
RUN useradd -m -u 1000 user
ENV PATH="/home/user/.local/bin:$PATH"

# Clone the model repo with real LFS payloads (GIT_LFS_SKIP_SMUDGE=0 forces
# the large files to be downloaded, not left as pointer stubs). The clone,
# provenance log, .git removal and ownership fix all happen in the SAME layer:
# deleting .git in a later RUN would leave the full repo history baked into
# the intermediate layer and the image would never shrink.
RUN GIT_LFS_SKIP_SMUDGE=0 git clone --depth 1 \
        https://huggingface.co/InfosysEnterprise/ModerationModelCode /ModerationModelCode \
    && git config --global --add safe.directory /ModerationModelCode \
    && git -C /ModerationModelCode log -1 --oneline \
    && rm -rf /ModerationModelCode/.git \
    && chown -R user:user /ModerationModelCode

# Everything from here on runs unprivileged.
USER user

# Overlay the application code on top of the cloned model tree.
COPY --chown=user . /ModerationModelCode
WORKDIR /ModerationModelCode

# Python dependencies, all installed into the user site (~/.local) to match
# the PATH configured above. Versions pinned for reproducibility.
RUN pip install --user --no-cache-dir presidio-analyzer==2.2.355

# Prebuilt wheels hosted in the Hugging Face model repo (one layer for both).
RUN pip install --user --no-cache-dir \
        https://huggingface.co/InfosysEnterprise/ModerationModelCode/resolve/main/lib/privacy-2.0.8-py3-none-any.whl \
        https://huggingface.co/InfosysEnterprise/ModerationModelCode/resolve/main/lib/en_core_web_lg-3.5.0-py3-none-any.whl

# Remaining application requirements.
RUN pip install --user --no-cache-dir -r /ModerationModelCode/requirements/requirement.txt

WORKDIR /ModerationModelCode/src

# Hugging Face Spaces serves on 7860 (EXPOSE is documentation only).
EXPOSE 7860

# Exec form: python is PID 1 and receives SIGTERM directly on `docker stop`.
CMD ["python", "main_MM.py"]