diff --git a/backend/cpp/llama/CMakeLists.txt b/backend/cpp/llama/CMakeLists.txt
index 5a17d3d31779..a903b53fed0f 100644
--- a/backend/cpp/llama/CMakeLists.txt
+++ b/backend/cpp/llama/CMakeLists.txt
@@ -38,11 +38,6 @@ find_package(absl CONFIG REQUIRED)
 find_package(Protobuf CONFIG REQUIRED)
 find_package(gRPC CONFIG REQUIRED)
 
-if ( DEFINED ENV{ONEAPI_ROOT})
-    find_package(MKL REQUIRED)
-    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -L${MKLROOT}/lib")
-endif()
-
 find_program(_PROTOBUF_PROTOC protoc)
 set(_GRPC_GRPCPP grpc++)
 find_program(_GRPC_CPP_PLUGIN_EXECUTABLE grpc_cpp_plugin)
@@ -88,7 +83,6 @@ target_link_libraries(${TARGET} PRIVATE common llama myclip ${CMAKE_THREAD_LIBS_
 
 # Conditionally link SYCL to grpc-server
 if ( DEFINED ENV{ONEAPI_ROOT})
-    find_package(IntelSYCL REQUIRED)
     target_link_libraries(${TARGET} PRIVATE common llama myclip ${CMAKE_THREAD_LIBS_INIT} absl::flags hw_grpc_proto
     absl::flags_parse
     gRPC::${_REFLECTION}
diff --git a/backend/cpp/llama/CMakeLists.txt.rpc-8662 b/backend/cpp/llama/CMakeLists.txt.rpc-8662
new file mode 100644
index 000000000000..82bbedcaa838
--- /dev/null
+++ b/backend/cpp/llama/CMakeLists.txt.rpc-8662
@@ -0,0 +1,8 @@
+# https://github.com/ggerganov/llama.cpp/issues/8665
+
+add_executable(rpc-server rpc-server.cpp)
+if ( DEFINED ENV{ONEAPI_ROOT})
+target_link_libraries(rpc-server PRIVATE ggml llama sycl)
+else()
+target_link_libraries(rpc-server PRIVATE ggml llama)
+endif()
diff --git a/backend/cpp/llama/prepare.sh b/backend/cpp/llama/prepare.sh
index 6c00f27caa38..c1c94ce6c9d6 100644
--- a/backend/cpp/llama/prepare.sh
+++ b/backend/cpp/llama/prepare.sh
@@ -17,4 +17,7 @@ cp -rfv llama.cpp/examples/llava/clip.h llama.cpp/examples/grpc-server/clip.h
 cp -rfv llama.cpp/examples/llava/llava.cpp llama.cpp/examples/grpc-server/llava.cpp
 echo '#include "llama.h"' > llama.cpp/examples/grpc-server/llava.h
 cat llama.cpp/examples/llava/llava.h >> llama.cpp/examples/grpc-server/llava.h
-cp -rfv llama.cpp/examples/llava/clip.cpp llama.cpp/examples/grpc-server/clip.cpp
\ No newline at end of file
+cp -rfv llama.cpp/examples/llava/clip.cpp llama.cpp/examples/grpc-server/clip.cpp
+
+# https://github.com/ggerganov/llama.cpp/issues/8665
+cp -rfv CMakeLists.txt.rpc-8662 llama.cpp/examples/rpc/CMakeLists.txt