Mirror of https://github.com/ggml-org/llama.cpp.git (synced 2025-11-04 09:32:00 +00:00)
devops: add server build step

Signed-off-by: Aaron Teo <aaron.teo1@ibm.com>
@@ -3,8 +3,9 @@ ARG UBUNTU_VERSION=24.10
 
 FROM gcc:${GCC_VERSION} AS build
 
-RUN apt-get update && \
-    apt-get install -y git cmake libcurl4-openssl-dev libopenblas-openmp-dev
+RUN apt update && \
+    apt upgrade -y && \
+    apt install -y git cmake libcurl4-openssl-dev libopenblas-openmp-dev
 
 WORKDIR /app
 COPY . .
@@ -28,3 +29,27 @@ RUN cp *.py /opt/llama.cpp \
     && cp .devops/tools.sh /opt/llama.cpp/tools.sh
 
 RUN ls -laR /opt/llama.cpp
+
+FROM --platform=linux/s390x gcr.io/distroless/cc-debian12:nonroot AS server
+
+ENV LLAMA_ARG_HOST=0.0.0.0
+ENV LLAMA_ARG_PORT=8080
+
+RUN apt update -y && \
+    apt upgrade -y && \
+    apt install -y libgomp1 curl && \
+    apt autoremove -y && \
+    apt clean -y && \
+    rm -rf /tmp/* /var/tmp/* && \
+    find /var/cache/apt/archives /var/lib/apt/lists -not -name lock -type f -delete && \
+    find /var/cache -type f -delete
+
+COPY --from=build /opt/llama.cpp/bin /
+
+RUN ls -la | grep llama
+
+WORKDIR /models
+USER nonroot:nonroot
+EXPOSE 8080
+
+ENTRYPOINT [ "/llama-server" ]
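As a rough usage sketch (not part of the commit): the new stage is named "server" in the diff, so it can be targeted directly with docker build. The Dockerfile path, image tag, and model filename below are placeholders, not taken from the commit.

    # Build only the "server" stage added by this commit.
    # Path ".devops/s390x.Dockerfile" and tag "llamacpp-server-s390x" are assumptions.
    docker build --platform linux/s390x -f .devops/s390x.Dockerfile --target server -t llamacpp-server-s390x .

    # Run it: the stage sets LLAMA_ARG_HOST=0.0.0.0 / LLAMA_ARG_PORT=8080, exposes
    # port 8080, and uses /models as its working directory, so mount models there.
    # Arguments after the image name are passed to the ENTRYPOINT, /llama-server.
    docker run --rm -p 8080:8080 -v "$PWD/models:/models" llamacpp-server-s390x -m /models/model.gguf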