# SPDX-License-Identifier: Apache-2.0
# SPDX-FileCopyrightText: Copyright contributors to the vLLM-FL project
#
# vLLM-FL C++ extensions - Root CMakeLists.txt

cmake_minimum_required(VERSION 3.26)
project(vllm_fl_extensions LANGUAGES CXX)

# Project-wide C++ standard: require exactly C++17 and disable compiler
# extensions so GCC/Clang emit -std=c++17 rather than -std=gnu++17,
# keeping flags consistent across toolchains.
set(CMAKE_CXX_STANDARD 17)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
set(CMAKE_CXX_EXTENSIONS OFF)
# =============================================================================
# Vendor Selection (REQUIRED - no auto-detection)
# =============================================================================

# The vendor can come from -DVLLM_VENDOR=... or the VLLM_VENDOR environment
# variable; the cmake option takes precedence when both are set.
if(NOT DEFINED VLLM_VENDOR)
  if(DEFINED ENV{VLLM_VENDOR})
    # Quote the expansion: environment values may contain spaces/semicolons.
    set(VLLM_VENDOR "$ENV{VLLM_VENDOR}")
  endif()
endif()

# Explicit unset/empty check. A bare if(NOT VLLM_VENDOR) truthiness probe
# would also reject legitimate-looking values CMake considers false
# (OFF, 0, NO, FALSE, ...) with the wrong error message.
if(NOT DEFINED VLLM_VENDOR OR "${VLLM_VENDOR}" STREQUAL "")
  message(FATAL_ERROR
    "VLLM_VENDOR is required but not specified.\n"
    "Please set VLLM_VENDOR environment variable or cmake option:\n"
    "  export VLLM_VENDOR=cuda    # For NVIDIA CUDA\n"
    "  export VLLM_VENDOR=ascend  # For Huawei Ascend\n"
    "\n"
    "Or pass to cmake:\n"
    "  cmake -DVLLM_VENDOR=cuda .."
  )
endif()

# Validate against the closed set of supported backends.
set(SUPPORTED_VENDORS cuda ascend)
if(NOT VLLM_VENDOR IN_LIST SUPPORTED_VENDORS)
  message(FATAL_ERROR
    "Unsupported vendor: ${VLLM_VENDOR}\n"
    "Supported vendors: ${SUPPORTED_VENDORS}"
  )
endif()

message(STATUS "==============================================")
message(STATUS "vLLM-FL Extensions: ${VLLM_VENDOR}")
message(STATUS "==============================================")
# =============================================================================
# Find Python
# =============================================================================

# Honor an explicitly-selected interpreter (passed by the build frontend as
# -DVLLM_PYTHON_EXECUTABLE=...) before FindPython performs its own lookup.
# Explicit DEFINED/empty check and a quoted expansion: paths may contain
# spaces, and a truthiness probe would misread values like "0" or "OFF".
if(DEFINED VLLM_PYTHON_EXECUTABLE AND NOT "${VLLM_PYTHON_EXECUTABLE}" STREQUAL "")
  set(Python_EXECUTABLE "${VLLM_PYTHON_EXECUTABLE}")
endif()

# Development.Module (rather than full Development) is sufficient for
# building extension modules and also works when libpython is unavailable
# (e.g. manylinux build images).
find_package(Python REQUIRED COMPONENTS Interpreter Development.Module)
message(STATUS "Python: ${Python_EXECUTABLE} (${Python_VERSION})")
# =============================================================================
# Find PyTorch
# =============================================================================

# Ask the selected interpreter where Torch's CMake package config lives.
# Capture the exit status: without this check, a missing/broken torch
# install leaves TORCH_CMAKE_PREFIX empty and find_package(Torch) later
# fails with an unrelated-looking "Torch not found" error.
execute_process(
  COMMAND "${Python_EXECUTABLE}" -c "import torch; print(torch.utils.cmake_prefix_path)"
  OUTPUT_VARIABLE TORCH_CMAKE_PREFIX
  RESULT_VARIABLE TORCH_QUERY_RESULT
  ERROR_VARIABLE TORCH_QUERY_ERROR
  OUTPUT_STRIP_TRAILING_WHITESPACE
  ERROR_STRIP_TRAILING_WHITESPACE
)
if(NOT TORCH_QUERY_RESULT EQUAL 0)
  message(FATAL_ERROR
    "Failed to locate PyTorch via ${Python_EXECUTABLE}:\n"
    "${TORCH_QUERY_ERROR}\n"
    "Ensure torch is installed in the selected Python environment."
  )
endif()
list(APPEND CMAKE_PREFIX_PATH "${TORCH_CMAKE_PREFIX}")

find_package(Torch REQUIRED)
message(STATUS "PyTorch: ${Torch_VERSION}")
# =============================================================================
# Include directories
# =============================================================================

# NOTE(review): directory-scoped include_directories leaks this path into
# every target declared by the vendor subdirectory. Converting to
# target_include_directories requires the vendor target names, which live in
# ${VLLM_VENDOR}/CMakeLists.txt — revisit once those are in view.
include_directories(${CMAKE_CURRENT_SOURCE_DIR})

# =============================================================================
# Build Vendor Backend
# =============================================================================

# Guard against a vendor value that passed validation but has no backend
# directory on disk (e.g. a sparse or partial checkout); add_subdirectory
# would otherwise fail with a less actionable error.
if(NOT EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/${VLLM_VENDOR}/CMakeLists.txt")
  message(FATAL_ERROR
    "Vendor backend '${VLLM_VENDOR}' is missing "
    "${CMAKE_CURRENT_SOURCE_DIR}/${VLLM_VENDOR}/CMakeLists.txt"
  )
endif()
add_subdirectory(${VLLM_VENDOR})