Skip to content

Commit

Permalink
Move file prep from build.sh to cmake (#3014)
Browse files Browse the repository at this point in the history
* Move file prep from build.sh to cmake

* Remove make format call from build.sh
  • Loading branch information
mreso authored Mar 12, 2024
1 parent dcca135 commit a73a08a
Show file tree
Hide file tree
Showing 7 changed files with 58 additions and 45 deletions.
41 changes: 0 additions & 41 deletions cpp/build.sh
Original file line number Diff line number Diff line change
Expand Up @@ -20,45 +20,6 @@ function detect_platform() {
echo -e "${COLOR_GREEN}Detected platform: $PLATFORM ${COLOR_OFF}"
}

# Download and stage the model/tokenizer artifacts needed by the C++ tests.
# Copies cpp/test/resources into the build tree, then fetches any missing
# example model files (each guarded by an existence check so downloads and
# exports run only once). Relies on globals set earlier in this script:
# TR_DIR, BASE_DIR, PLATFORM, BWD, COLOR_GREEN, COLOR_OFF.
function prepare_test_files() {
echo -e "${COLOR_GREEN}[ INFO ]Preparing test files ${COLOR_OFF}"
local EX_DIR="${TR_DIR}/examples/"
# Mirror the test resources into TR_DIR; --link-dest hard-links files that
# are unchanged relative to the source tree instead of copying them.
rsync -a --link-dest=../../test/resources/ ${BASE_DIR}/test/resources/ ${TR_DIR}/
# babyllama: llama2.c tokenizer plus the 15M-parameter "stories" checkpoint.
if [ ! -f "${EX_DIR}/babyllama/babyllama_handler/tokenizer.bin" ]; then
wget -q https://github.com/karpathy/llama2.c/raw/master/tokenizer.bin -O "${EX_DIR}/babyllama/babyllama_handler/tokenizer.bin"
fi
if [ ! -f "${EX_DIR}/babyllama/babyllama_handler/stories15M.bin" ]; then
wget -q https://huggingface.co/karpathy/tinyllamas/resolve/main/stories15M.bin -O "${EX_DIR}/babyllama/babyllama_handler/stories15M.bin"
fi
# PT2.2 torch.export does not support Mac
if [ "$PLATFORM" = "Linux" ]; then
# llama2: download the checkpoint if needed, then AOT-compile it into a
# shared object with the llama2.so helper sources on PYTHONPATH.
if [ ! -f "${EX_DIR}/aot_inductor/llama_handler/stories15M.so" ]; then
local HANDLER_DIR=${EX_DIR}/aot_inductor/llama_handler/
if [ ! -f "${HANDLER_DIR}/stories15M.pt" ]; then
wget -q https://huggingface.co/karpathy/tinyllamas/resolve/main/stories15M.pt?download=true -O "${HANDLER_DIR}/stories15M.pt"
fi
local LLAMA_SO_DIR=${BASE_DIR}/third-party/llama2.so/
PYTHONPATH=${LLAMA_SO_DIR}:${PYTHONPATH} python ${BASE_DIR}/../examples/cpp/aot_inductor/llama2/compile.py --checkpoint ${HANDLER_DIR}/stories15M.pt ${HANDLER_DIR}/stories15M.so
fi
# bert: run the AOT export in the example dir and move its outputs into the
# handler dir. TOKENIZERS_PARALLELISM is presumably set to suppress the HF
# tokenizers fork warning during export — TODO confirm.
if [ ! -f "${EX_DIR}/aot_inductor/bert_handler/bert-seq.so" ]; then
pip install transformers
local HANDLER_DIR=${EX_DIR}/aot_inductor/bert_handler/
export TOKENIZERS_PARALLELISM=false
cd ${BASE_DIR}/../examples/cpp/aot_inductor/bert/
python aot_compile_export.py
mv bert-seq.so ${HANDLER_DIR}/bert-seq.so
mv Transformer_model/tokenizer.json ${HANDLER_DIR}/tokenizer.json
export TOKENIZERS_PARALLELISM=""
fi
# resnet: export directly into the handler dir (script writes to $PWD).
if [ ! -f "${EX_DIR}/aot_inductor/resnet_handler/resnet50_pt2.so" ]; then
local HANDLER_DIR=${EX_DIR}/aot_inductor/resnet_handler/
cd ${HANDLER_DIR}
python ${BASE_DIR}/../examples/cpp/aot_inductor/resnet/resnet50_torch_export.py
fi
fi
# Restore the working directory changed by the export steps above.
cd "$BWD" || exit
}

function build() {
echo -e "${COLOR_GREEN}[ INFO ]Building backend ${COLOR_OFF}"
MAYBE_BUILD_QUIC=""
Expand Down Expand Up @@ -121,7 +82,6 @@ function build() {
fi

make -j "$JOBS"
make format
make install
echo -e "${COLOR_GREEN}torchserve_cpp build is complete. To run unit test: \
./_build/test/torchserve_cpp_test ${COLOR_OFF}"
Expand Down Expand Up @@ -207,6 +167,5 @@ cd $BASE_DIR

git submodule update --init --recursive

prepare_test_files
build
install_torchserve_cpp
2 changes: 2 additions & 0 deletions cpp/src/backends/core/backend.cc
Original file line number Diff line number Diff line change
Expand Up @@ -22,12 +22,14 @@ bool Backend::Initialize(const std::string &model_dir) {
// TODO: windows
TS_LOGF(DEBUG, "Initializing from manifest: {}", manifest_file);
if (!manifest_->Initialize(manifest_file)) {
TS_LOGF(ERROR, "Could not initialize from manifest: {}", manifest_file);
return false;
}

LoadHandler(model_dir);

if (!handler_) {
TS_LOG(ERROR, "Could not load handler");
return false;
}

Expand Down
4 changes: 3 additions & 1 deletion cpp/src/examples/CMakeLists.txt
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@

# Stage the shared test resources into the build tree at configure time so
# the example handler outputs below land under .../test/resources/examples/.
# NOTE(review): file(COPY) runs only when CMake reconfigures; resources edited
# afterwards are not re-copied until the next configure.
file(COPY ${CMAKE_CURRENT_SOURCE_DIR}/../../test/resources/ DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/../../test/resources/)

# Each example builds its handler library directly into the test-resources
# tree that the unit tests load from.
add_subdirectory("../../../examples/cpp/babyllama/" "${CMAKE_CURRENT_BINARY_DIR}/../../test/resources/examples/babyllama/babyllama_handler/")

add_subdirectory("../../../examples/cpp/llamacpp/" "${CMAKE_CURRENT_BINARY_DIR}/../../test/resources/examples/llamacpp/llamacpp_handler/")
Expand All @@ -10,6 +12,6 @@ if(CMAKE_SYSTEM_NAME MATCHES "Linux")
add_subdirectory("../../../examples/cpp/aot_inductor/llama2/" "${CMAKE_CURRENT_BINARY_DIR}/../../test/resources/examples/aot_inductor/llama_handler/")

add_subdirectory("../../../examples/cpp/aot_inductor/bert" "${CMAKE_CURRENT_BINARY_DIR}/../../test/resources/examples/aot_inductor/bert_handler/")

add_subdirectory("../../../examples/cpp/aot_inductor/resnet" "${CMAKE_CURRENT_BINARY_DIR}/../../test/resources/examples/aot_inductor/resnet_handler/")
endif()
10 changes: 9 additions & 1 deletion examples/cpp/aot_inductor/bert/CMakeLists.txt
Original file line number Diff line number Diff line change
@@ -1,5 +1,13 @@

# Build rule: run the Python AOT export to produce bert-seq.so in the build
# dir, then copy the generated tokenizer.json next to it. The env var is
# presumably set to silence the HF tokenizers fork warning — TODO confirm.
# NOTE(review): consider adding VERBATIM and using ${CMAKE_COMMAND} -E copy
# instead of cp for portability.
add_custom_command(
OUTPUT bert-seq.so
COMMAND TOKENIZERS_PARALLELISM=false python ${CMAKE_CURRENT_SOURCE_DIR}/aot_compile_export.py
COMMAND cp ${CMAKE_CURRENT_BINARY_DIR}/Transformer_model/tokenizer.json ${CMAKE_CURRENT_BINARY_DIR}/
DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/aot_compile_export.py
)

set(TOKENZIER_CPP_PATH ${CMAKE_CURRENT_SOURCE_DIR}/../../../../cpp/third-party/tokenizers-cpp)
add_subdirectory(${TOKENZIER_CPP_PATH} tokenizers EXCLUDE_FROM_ALL)
add_library(bert_handler SHARED src/bert_handler.cc)
add_library(bert_handler SHARED src/bert_handler.cc bert-seq.so)
target_include_directories(bert_handler PRIVATE ${TOKENZIER_CPP_PATH}/include)
target_link_libraries(bert_handler PRIVATE ts_backends_core ts_utils ${TORCH_LIBRARIES} tokenizers_cpp)
20 changes: 19 additions & 1 deletion examples/cpp/aot_inductor/llama2/CMakeLists.txt
Original file line number Diff line number Diff line change
@@ -1,5 +1,23 @@

# Download the 15M-parameter "stories" Llama2 checkpoint at configure time.
# DOWNLOAD_NO_EXTRACT keeps the raw file; note the ?download=true query string
# becomes part of the downloaded file name (matched by the escaped name in the
# compile command below).
FetchContent_Declare(
stories15M_pt
URL https://huggingface.co/karpathy/tinyllamas/resolve/main/stories15M.pt?download=true
DOWNLOAD_NO_EXTRACT TRUE
DOWNLOAD_DIR ${CMAKE_CURRENT_BINARY_DIR}/
)

FetchContent_MakeAvailable(stories15M_pt)


# Build rule: AOT-compile the checkpoint into stories15M.so with the llama2.so
# helper sources on PYTHONPATH. NOTE(review): consider adding VERBATIM.
add_custom_command(
OUTPUT stories15M.so
COMMAND PYTHONPATH=${CMAKE_CURRENT_SOURCE_DIR}/../../../../cpp/third-party/llama2.so/ python ${CMAKE_CURRENT_SOURCE_DIR}/compile.py --checkpoint ${CMAKE_CURRENT_BINARY_DIR}/\'stories15M.pt?download=true\' ${CMAKE_CURRENT_BINARY_DIR}/stories15M.so
DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/compile.py
)


add_library(llama2_so STATIC ../../../../cpp/third-party/llama2.so/run.cpp)
target_compile_options(llama2_so PRIVATE -Wall -Wextra -Ofast -fpermissive)

add_library(llama_so_handler SHARED src/llama_handler.cc)
add_library(llama_so_handler SHARED src/llama_handler.cc stories15M.so)
target_link_libraries(llama_so_handler PRIVATE llama2_so ts_backends_core ts_utils ${TORCH_LIBRARIES})
9 changes: 8 additions & 1 deletion examples/cpp/aot_inductor/resnet/CMakeLists.txt
Original file line number Diff line number Diff line change
@@ -1,2 +1,9 @@
add_library(resnet_handler SHARED src/resnet_handler.cc)

# Build rule: run the export script to produce resnet50_pt2.so in the build
# dir. NOTE(review): consider adding VERBATIM and WORKING_DIRECTORY here.
add_custom_command(
OUTPUT resnet50_pt2.so
COMMAND python ${CMAKE_CURRENT_SOURCE_DIR}/resnet50_torch_export.py
DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/resnet50_torch_export.py
)

# Listing resnet50_pt2.so as a source ties the custom command's OUTPUT to this
# target, so the export runs as part of building the handler.
add_library(resnet_handler SHARED src/resnet_handler.cc resnet50_pt2.so)
target_link_libraries(resnet_handler PRIVATE ts_backends_core ts_utils ${TORCH_LIBRARIES})
17 changes: 17 additions & 0 deletions examples/cpp/babyllama/CMakeLists.txt
Original file line number Diff line number Diff line change
@@ -1,3 +1,20 @@
include(FetchContent)

# Fetch the babyllama test artifacts at configure time: the 15M-parameter
# "stories" checkpoint and the llama2.c tokenizer. DOWNLOAD_NO_EXTRACT keeps
# the raw binaries in the build dir.
FetchContent_Declare(
stories15M_bin
URL https://huggingface.co/karpathy/tinyllamas/resolve/main/stories15M.bin
DOWNLOAD_NO_EXTRACT TRUE
DOWNLOAD_DIR ${CMAKE_CURRENT_BINARY_DIR}/
)

FetchContent_Declare(
tokenizer_bin
URL https://github.com/karpathy/llama2.c/raw/master/tokenizer.bin
DOWNLOAD_NO_EXTRACT TRUE
DOWNLOAD_DIR ${CMAKE_CURRENT_BINARY_DIR}/
)

FetchContent_MakeAvailable(tokenizer_bin stories15M_bin)

# Compile the llama2.c reference implementation as a static library; -fPIC so
# it can be linked into the shared handler library.
add_library(llama2_c STATIC ../../../cpp/third-party/llama2.c/run.c)
target_compile_options(llama2_c PRIVATE -Wall -Wextra -Ofast -fPIC)
Expand Down

0 comments on commit a73a08a

Please sign in to comment.