Andrew McCracken
Claude
committed on
Commit
·
cf74856
1
Parent(s):
ccc0289
Use pre-built Docker image from Docker Hub
Browse files
- Updated Dockerfile to use techdaskalos/cybersecchatbot:latest
- Removed install_llama.py and start.sh (no longer needed)
- Updated requirements.txt to use llama_cpp_python without version pin
- Simplified deployment using pre-built AMD64 image
🤖 Generated with [Claude Code](https://claude.com/claude-code)
Co-Authored-By: Claude <noreply@anthropic.com>
- Dockerfile +1 -1
- install_llama.py +0 -59
- requirements.txt +1 -1
- start.sh +0 -14
Dockerfile
CHANGED
|
@@ -10,4 +10,4 @@ ENV USE_RAG=true
|
|
| 10 |
ENV CACHE_ENABLED=true
|
| 11 |
|
| 12 |
EXPOSE 8000
|
| 13 |
-
CMD ["
|
|
|
|
| 10 |
ENV CACHE_ENABLED=true
|
| 11 |
|
| 12 |
EXPOSE 8000
|
| 13 |
+
CMD ["python", "main.py"]
|
install_llama.py
DELETED
|
@@ -1,59 +0,0 @@
|
|
| 1 |
-
#!/usr/bin/env python3
|
| 2 |
-
"""
|
| 3 |
-
Install llama-cpp-python at startup to avoid Docker build timeout.
|
| 4 |
-
This script runs before the main application starts.
|
| 5 |
-
"""
|
| 6 |
-
import subprocess
|
| 7 |
-
import sys
|
| 8 |
-
import os
|
| 9 |
-
import time
|
| 10 |
-
|
| 11 |
-
def install_llama_cpp():
    """Install llama-cpp-python into /tmp at container startup.

    Returns:
        True when the pip install completes successfully, False when pip
        exits non-zero.

    Pip's output streams directly to the console (capture_output=False)
    so installation progress is visible in container logs.
    """
    print("=" * 60)
    print("🔧 Installing llama-cpp-python at startup...")
    print("=" * 60)

    # /tmp is chosen because it is always writable, even when the rest of
    # the filesystem is read-only at runtime.
    install_dir = "/tmp/llama_cpp_install"
    os.makedirs(install_dir, exist_ok=True)

    env = os.environ.copy()
    # NOTE(review): PYTHONUSERBASE is set although pip is invoked with
    # --target below (which takes precedence) — presumably belt-and-braces;
    # confirm whether it is still needed.
    env['PYTHONUSERBASE'] = install_dir
    # Build a plain CPU wheel: no BLAS, CUDA, or Metal backends.
    env['CMAKE_ARGS'] = '-DLLAMA_BLAS=OFF -DLLAMA_CUBLAS=OFF -DLLAMA_METAL=OFF'

    start_time = time.time()

    try:
        # check=True raises CalledProcessError on a non-zero pip exit; the
        # return value is not needed (output is not captured), so the
        # previously unused `result =` binding has been dropped.
        subprocess.run(
            [sys.executable, "-m", "pip", "install",
             "--target", install_dir,
             "--no-cache-dir",
             "llama-cpp-python==0.3.14"],
            env=env,
            capture_output=False,
            text=True,
            check=True
        )

        elapsed = time.time() - start_time
        print(f"\n✅ llama-cpp-python installed successfully in {elapsed:.1f}s")

        # Make the freshly installed package importable by this process.
        if install_dir not in sys.path:
            sys.path.insert(0, install_dir)

        print(f"📦 Installation directory: {install_dir}")
        return True

    except subprocess.CalledProcessError as e:
        elapsed = time.time() - start_time
        print(f"\n❌ Installation failed after {elapsed:.1f}s")
        print(f"Error: {e}")
        return False
|
| 56 |
-
|
| 57 |
-
if __name__ == "__main__":
    # Exit 0 on success, 1 on failure, so a caller using `set -e`
    # (e.g. start.sh) aborts when the install fails.
    sys.exit(0 if install_llama_cpp() else 1)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
requirements.txt
CHANGED
|
@@ -37,7 +37,7 @@ joblib==1.5.1
|
|
| 37 |
jsonschema==4.25.0
|
| 38 |
jsonschema-specifications==2025.4.1
|
| 39 |
kubernetes==33.1.0
|
| 40 |
-
|
| 41 |
markdown-it-py==3.0.0
|
| 42 |
MarkupSafe==3.0.2
|
| 43 |
mdurl==0.1.2
|
|
|
|
| 37 |
jsonschema==4.25.0
|
| 38 |
jsonschema-specifications==2025.4.1
|
| 39 |
kubernetes==33.1.0
|
| 40 |
+
llama_cpp_python
|
| 41 |
markdown-it-py==3.0.0
|
| 42 |
MarkupSafe==3.0.2
|
| 43 |
mdurl==0.1.2
|
start.sh
DELETED
|
@@ -1,14 +0,0 @@
|
|
| 1 |
-
#!/bin/bash
# Container entrypoint: install the native llama-cpp-python wheel first,
# then hand off to the application.  `set -e` aborts the whole script if
# the install step exits non-zero.
set -e

echo "🚀 Starting Cybersecurity Chatbot..."

# install_llama.py pip-installs llama-cpp-python into /tmp/llama_cpp_install.
python3 install_llama.py

# Make that install directory importable by the application process.
export PYTHONPATH="/tmp/llama_cpp_install:$PYTHONPATH"

echo "▶️ Launching FastAPI application..."
python3 main.py
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|