Skip to content

Commit 73edf7c

Browse files
committed
feat(build): only build llama.cpp relevant targets
Signed-off-by: Ettore Di Giacinto <[email protected]>
1 parent 59af0e7 commit 73edf7c

File tree

2 files changed: +7 −4 lines changed

Makefile

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -54,7 +54,7 @@ override LD_FLAGS += -X "github.com/go-skynet/LocalAI/internal.Commit=$(shell gi
 
 OPTIONAL_TARGETS?=
 
-OS := $(shell uname -s)
+export OS := $(shell uname -s)
 ARCH := $(shell uname -m)
 GREEN := $(shell tput -Txterm setaf 2)
 YELLOW := $(shell tput -Txterm setaf 3)
@@ -764,7 +764,7 @@ backend-assets/grpc/llama-cpp-grpc: backend-assets/grpc
 	cp -rf backend/cpp/llama backend/cpp/llama-grpc
 	$(MAKE) -C backend/cpp/llama-grpc purge
 	$(info ${GREEN}I llama-cpp build info:grpc${RESET})
-	CMAKE_ARGS="$(CMAKE_ARGS) -DLLAMA_RPC=ON -DLLAMA_AVX=off -DLLAMA_AVX2=off -DLLAMA_AVX512=off -DLLAMA_FMA=off -DLLAMA_F16C=off" $(MAKE) VARIANT="llama-grpc" build-llama-cpp-grpc-server
+	CMAKE_ARGS="$(CMAKE_ARGS) -DLLAMA_RPC=ON -DLLAMA_AVX=off -DLLAMA_AVX2=off -DLLAMA_AVX512=off -DLLAMA_FMA=off -DLLAMA_F16C=off" TARGET="--target grpc-server --target rpc-server" $(MAKE) VARIANT="llama-grpc" build-llama-cpp-grpc-server
 	cp -rfv backend/cpp/llama-grpc/grpc-server backend-assets/grpc/llama-cpp-grpc
 
 backend-assets/util/llama-cpp-rpc-server: backend-assets/grpc/llama-cpp-grpc

backend/cpp/llama/Makefile

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@ LLAMA_VERSION?=
 CMAKE_ARGS?=
 BUILD_TYPE?=
 ONEAPI_VARS?=/opt/intel/oneapi/setvars.sh
+TARGET?=--target grpc-server
 
 # If build type is cublas, then we set -DLLAMA_CUBLAS=ON to CMAKE_ARGS automatically
 ifeq ($(BUILD_TYPE),cublas)
@@ -23,6 +24,8 @@ else ifeq ($(BUILD_TYPE),hipblas)
 else ifeq ($(OS),darwin)
 	ifneq ($(BUILD_TYPE),metal)
 		CMAKE_ARGS+=-DLLAMA_METAL=OFF
+	else
+		TARGET+=--target ggml-metal
 	endif
 endif
 
@@ -62,8 +65,8 @@ grpc-server: llama.cpp llama.cpp/examples/grpc-server
 	@echo "Building grpc-server with $(BUILD_TYPE) build type and $(CMAKE_ARGS)"
 ifneq (,$(findstring sycl,$(BUILD_TYPE)))
 	bash -c "source $(ONEAPI_VARS); \
-	cd llama.cpp && mkdir -p build && cd build && cmake .. $(CMAKE_ARGS) && $(MAKE)"
+	cd llama.cpp && mkdir -p build && cd build && cmake .. $(CMAKE_ARGS) && cmake --build . --config Release $(TARGET)"
 else
-	cd llama.cpp && mkdir -p build && cd build && cmake .. $(CMAKE_ARGS) && $(MAKE)
+	cd llama.cpp && mkdir -p build && cd build && cmake .. $(CMAKE_ARGS) && cmake --build . --config Release $(TARGET)
 endif
 	cp llama.cpp/build/bin/grpc-server .

0 commit comments

Comments (0)