johnbridges committed on
Commit
0a7eeac
·
1 Parent(s): a8f572d
Files changed (3) hide show
  1. Dockerfile +0 -9
  2. Dockerfile-phi-4 +0 -114
  3. appsettings-phi-4.json +0 -60
Dockerfile CHANGED
@@ -78,22 +78,13 @@ RUN --mount=type=secret,id=GITHUB_TOKEN,mode=0444,required=true \
78
 
79
 
80
  # Copy files into the container as the non-root user
81
- COPY --chown=user:user system_prompt_qwen_3 /home/user/code/models/system_prompt_qwen_3
82
- COPY --chown=user:user system_prompt_qwen_3_run /home/user/code/models/system_prompt_qwen_3_run
83
  COPY --chown=user:user appsettings.json /home/user/code/app/appsettings.json
84
  COPY --chown=user:user index.html /home/user/code/app/wwwroot/index.html
85
- COPY --chown=user:user append_run.sh /home/user/code/models/append_run.sh
86
- COPY --chown=user:user expect-build-qwen-3 /home/user/code/models/expect-build-qwen-3
87
 
88
- # Set permissions for scripts as the non-root user
89
- RUN chmod +x /home/user/code/models/append_run.sh && \
90
- chmod +x /home/user/code/models/expect-build-qwen-3
91
 
92
  # Set the working directory for the build-qwen-3 script
93
  WORKDIR /home/user/code/models
94
 
95
- # Run the build-qwen-3 script
96
- # RUN ./expect-build-qwen-3
97
 
98
  # Expose port 7860 for Hugging Face Spaces
99
  EXPOSE 7860
 
78
 
79
 
80
  # Copy files into the container as the non-root user
 
 
81
  COPY --chown=user:user appsettings.json /home/user/code/app/appsettings.json
82
  COPY --chown=user:user index.html /home/user/code/app/wwwroot/index.html
 
 
83
 
 
 
 
84
 
85
  # Set the working directory for the build-qwen-3 script
86
  WORKDIR /home/user/code/models
87
 
 
 
88
 
89
  # Expose port 7860 for Hugging Face Spaces
90
  EXPOSE 7860
Dockerfile-phi-4 DELETED
@@ -1,114 +0,0 @@
1
- # Use the official Debian 12 (Bookworm) base image
2
- FROM debian:13
3
-
4
- # Set environment variables to avoid interactive prompts during package installation
5
- ENV DEBIAN_FRONTEND=noninteractive
6
-
7
- # Install system-level dependencies as root
8
- RUN apt-get update && \
9
- apt-get install -y \
10
- build-essential \
11
- curl \
12
- git \
13
- cmake \
14
- clang \
15
- pkg-config \
16
- ccache \
17
- wget \
18
- vim \
19
- libicu76 \
20
- expect
21
-
22
- # Create a non-root user and set up their environment
23
- RUN useradd -m user && \
24
- mkdir -p /home/user/code && \
25
- chown -R user:user /home/user
26
-
27
- # Switch to the non-root user
28
- USER user
29
- WORKDIR /home/user
30
-
31
- RUN mkdir -p /home/user/code/models && \
32
- mkdir -p /home/user/code/app/wwwroot && \
33
- cd /home/user/code/models && \
34
- wget -q https://huggingface.co/Mungert/Phi-4-mini-instruct.gguf/resolve/main/phi-4-mini-q4_0.gguf
35
-
36
-
37
- # Clone and build OpenBLAS as the non-root user
38
- RUN git clone https://github.com/OpenMathLib/OpenBLAS.git /home/user/code/models/OpenBLAS && \
39
- cd /home/user/code/models/OpenBLAS && \
40
- make -j2 > build.log 2>&1 || (tail -20 build.log && false)
41
-
42
- # Switch to root for the OpenBLAS installation
43
- USER root
44
- RUN cd /home/user/code/models/OpenBLAS && \
45
- make install > install.log 2>&1 || (tail -20 install.log && false) && \
46
- cp /opt/OpenBLAS/lib/libopenblas* /usr/local/lib/
47
-
48
- # Switch back to the non-root user
49
- USER user
50
-
51
- # Clone and build llama.cpp with OpenBLAS support as the non-root user
52
- RUN git clone https://github.com/ggerganov/llama.cpp /home/user/code/models/llama.cpp && \
53
- cd /home/user/code/models/llama.cpp && \
54
- export PKG_CONFIG_PATH=/opt/OpenBLAS/lib/pkgconfig:$PKG_CONFIG_PATHa && \
55
- cmake -B build -DGGML_BLAS=ON -DGGML_BLAS_VENDOR=OpenBLAS -DBLAS_INCLUDE_DIRS=/home/user/code/models/OpenBLAS -DLLAMA_CURL=OFF && \
56
- cmake --build build --config Release -j2 && \
57
- cp /home/user/code/models/llama.cpp/build/bin/* /home/user/code/models/llama.cpp/
58
-
59
-
60
-
61
- # Install .NET 10.0 as the non-root user
62
- RUN wget https://dot.net/v1/dotnet-install.sh -O dotnet-install.sh && \
63
- chmod +x dotnet-install.sh && \
64
- ./dotnet-install.sh --channel 10.0
65
-
66
- # Set persistent environment variables
67
- ENV DOTNET_ROOT=/home/user/.dotnet
68
- ENV PATH=$PATH:$DOTNET_ROOT:$DOTNET_ROOT/tools
69
-
70
- # Verify .NET installation and current user
71
- RUN whoami && dotnet --version
72
-
73
- # Clone repositories using the GITHUB_TOKEN secret
74
- RUN --mount=type=secret,id=GITHUB_TOKEN,mode=0444,required=true \
75
- git clone https://$(cat /run/secrets/GITHUB_TOKEN)@github.com/Mungert69/NetworkMonitorLib.git /home/user/code/NetworkMonitorLib && \
76
- git clone https://$(cat /run/secrets/GITHUB_TOKEN)@github.com/Mungert69/NetworkMonitorLLM.git /home/user/code/NetworkMonitorLLM && \
77
- git clone https://$(cat /run/secrets/GITHUB_TOKEN)@github.com/Mungert69/NetworkMonitorData.git /home/user/code/NetworkMonitorData
78
-
79
-
80
- # Copy files into the container as the non-root user
81
- COPY --chown=user:user system_prompt_phi_4_mini /home/user/code/models/system_prompt_phi_4_mini
82
- COPY --chown=user:user system_prompt_phi_4_mini_run /home/user/code/models/system_prompt_phi_4_mini_run
83
- COPY --chown=user:user appsettings.json /home/user/code/app/appsettings.json
84
- COPY --chown=user:user index.html /home/user/code/app/wwwroot/index.html
85
- COPY --chown=user:user append_run.sh /home/user/code/models/append_run.sh
86
- COPY --chown=user:user expect-build-phi-4-mini /home/user/code/models/expect-build-phi-4-mini
87
-
88
- # Set permissions for scripts as the non-root user
89
- RUN chmod +x /home/user/code/models/append_run.sh && \
90
- chmod +x /home/user/code/models/expect-build-phi-4-mini
91
-
92
- # Set the working directory for the build-phi-4-mini script
93
- WORKDIR /home/user/code/models
94
-
95
- # Run the build-phi-4-mini script
96
- RUN ./expect-build-phi-4-mini
97
-
98
- # Expose port 7860 for Hugging Face Spaces
99
- EXPOSE 7860
100
- # Set the working directory
101
- WORKDIR /home/user/code/NetworkMonitorLLM
102
-
103
- # Build the .NET project as the non-root user
104
- RUN dotnet restore && \
105
- dotnet build -c Release
106
-
107
- RUN cp -r /home/user/code/NetworkMonitorLLM/bin/Release/net10.0/* /home/user/code/app/ && \
108
- rm -rf /home/user/code/NetworkMonitorLib /home/user/code/NetworkMonitorLLM /home/user/code/NetworkMonitorData
109
-
110
- # Set the working directory to the `app` directory
111
- WORKDIR /home/user/code/app
112
-
113
- # Run the .NET app as the non-root user
114
- CMD ["dotnet", "NetworkMonitorLLM.dll", "--urls", "http://0.0.0.0:7860"]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
appsettings-phi-4.json DELETED
@@ -1,60 +0,0 @@
1
- {
2
- "Logging": {
3
- "LogLevel": {
4
- "Default": "Information",
5
- "Microsoft": "Warning",
6
- "Microsoft.Hosting.Lifetime": "Information"
7
- }
8
- },
9
- "OpenAIApiKey" :".env",
10
- "EmailEncryptKey": ".env",
11
- "LocalSystemUrl": {
12
- "ExternalUrl": "https://asmonitorsrv.readyforquantum.com",
13
- "IPAddress": "",
14
- "RabbitHostName": "rabbitmq.readyforquantum.com",
15
- "RabbitPort": 55672,
16
- "RabbitInstanceName" : "ASSrv-LLMService",
17
- "RabbitUserName" : "usercommonxf1",
18
- "RabbitVHost" : "/vhostuser",
19
- "UseTls" : true
20
- },
21
- "ServiceID" : "monitor",
22
- "StartThisTestLLM" : true,
23
- "LlmNoInitMessage" : false,
24
- "ServiceAuthKey" : ".env" ,
25
- "LlmModelPath" : "/home/user/code/models/",
26
- "LlmModelFileName" : "phi-4-mini-q4_0.gguf",
27
- "LlmContextFileName" : "context-phi-4-mini.gguf",
28
- "LlmSystemPrompt" : "system_prompt_phi_4_mini_run",
29
- "LlmPromptMode" : " -if -sp -no-cnv --simple-io ",
30
- "LlmVersion" : "phi_4_mini",
31
- "LlmCtxSize" : 12000,
32
- "LlmOpenAICtxSize" : 32000,
33
- "LlmCtxRatio" : 6,
34
- "LlmTemp" : "0.3",
35
- "LlmThreads" : 2,
36
- "LlmUserPromptTimeout" : 1200,
37
- "LlmSessionIdleTimeout" : 1440,
38
- "LlmGptModel" : "gpt-4.1-mini",
39
- "LlmHFModelID" : "meta-llama/llama-3.3-70b-instruct",
40
- "LlmHFKey" : ".env",
41
- "LlmHFUrl" : "https://api.novita.ai/v3/openai/chat/completions",
42
- "LlmHFModelVersion" : "llama_3.2",
43
- "IsStream" : false,
44
- "UseFixedPort" : true,
45
- "HFToken" : ".env",
46
- "DataRepoId" : "mungert/NetMonLLMDataLive",
47
- "RedisUrl" :"redis.readyforquantum.co.uk:46379",
48
- "MaxRetries" : -1,
49
- "RabbitPassword" : ".env",
50
- "RabbitRoutingKey": "execute.local",
51
- "RabbitExchangeType": "direct",
52
- "LlmSystemPromptTimeout": 120,
53
- "LlmSpaceModelID": "Qwen/Qwen3-1.7B",
54
- "LlmUseHF": false,
55
- "AudioServiceUrls": ["https://devtranscribe.readyforquantum.com","https://devtranscribe2.readyforquantum.com","https://devtranscribe3.readyforquantum.com"],
56
- "REDIS_PASSWORD" :".env",
57
- "UseTls" : true,
58
-
59
- }
60
-