Skip to content

Commit a73a08a

Browse files
authored
Move file prep from build.sh to cmake (#3014)
* Move file prep from build.sh to cmake * Remove make format call from build.sh
1 parent dcca135 commit a73a08a

File tree

7 files changed

+58
-45
lines changed

7 files changed

+58
-45
lines changed

cpp/build.sh

-41
Original file line numberDiff line numberDiff line change
@@ -20,45 +20,6 @@ function detect_platform() {
2020
echo -e "${COLOR_GREEN}Detected platform: $PLATFORM ${COLOR_OFF}"
2121
}
2222

23-
function prepare_test_files() {
24-
echo -e "${COLOR_GREEN}[ INFO ]Preparing test files ${COLOR_OFF}"
25-
local EX_DIR="${TR_DIR}/examples/"
26-
rsync -a --link-dest=../../test/resources/ ${BASE_DIR}/test/resources/ ${TR_DIR}/
27-
if [ ! -f "${EX_DIR}/babyllama/babyllama_handler/tokenizer.bin" ]; then
28-
wget -q https://github.com/karpathy/llama2.c/raw/master/tokenizer.bin -O "${EX_DIR}/babyllama/babyllama_handler/tokenizer.bin"
29-
fi
30-
if [ ! -f "${EX_DIR}/babyllama/babyllama_handler/stories15M.bin" ]; then
31-
wget -q https://huggingface.co/karpathy/tinyllamas/resolve/main/stories15M.bin -O "${EX_DIR}/babyllama/babyllama_handler/stories15M.bin"
32-
fi
33-
# PT2.2 torch.export does not support Mac
34-
if [ "$PLATFORM" = "Linux" ]; then
35-
if [ ! -f "${EX_DIR}/aot_inductor/llama_handler/stories15M.so" ]; then
36-
local HANDLER_DIR=${EX_DIR}/aot_inductor/llama_handler/
37-
if [ ! -f "${HANDLER_DIR}/stories15M.pt" ]; then
38-
wget -q https://huggingface.co/karpathy/tinyllamas/resolve/main/stories15M.pt?download=true -O "${HANDLER_DIR}/stories15M.pt"
39-
fi
40-
local LLAMA_SO_DIR=${BASE_DIR}/third-party/llama2.so/
41-
PYTHONPATH=${LLAMA_SO_DIR}:${PYTHONPATH} python ${BASE_DIR}/../examples/cpp/aot_inductor/llama2/compile.py --checkpoint ${HANDLER_DIR}/stories15M.pt ${HANDLER_DIR}/stories15M.so
42-
fi
43-
if [ ! -f "${EX_DIR}/aot_inductor/bert_handler/bert-seq.so" ]; then
44-
pip install transformers
45-
local HANDLER_DIR=${EX_DIR}/aot_inductor/bert_handler/
46-
export TOKENIZERS_PARALLELISM=false
47-
cd ${BASE_DIR}/../examples/cpp/aot_inductor/bert/
48-
python aot_compile_export.py
49-
mv bert-seq.so ${HANDLER_DIR}/bert-seq.so
50-
mv Transformer_model/tokenizer.json ${HANDLER_DIR}/tokenizer.json
51-
export TOKENIZERS_PARALLELISM=""
52-
fi
53-
if [ ! -f "${EX_DIR}/aot_inductor/resnet_handler/resnet50_pt2.so" ]; then
54-
local HANDLER_DIR=${EX_DIR}/aot_inductor/resnet_handler/
55-
cd ${HANDLER_DIR}
56-
python ${BASE_DIR}/../examples/cpp/aot_inductor/resnet/resnet50_torch_export.py
57-
fi
58-
fi
59-
cd "$BWD" || exit
60-
}
61-
6223
function build() {
6324
echo -e "${COLOR_GREEN}[ INFO ]Building backend ${COLOR_OFF}"
6425
MAYBE_BUILD_QUIC=""
@@ -121,7 +82,6 @@ function build() {
12182
fi
12283

12384
make -j "$JOBS"
124-
make format
12585
make install
12686
echo -e "${COLOR_GREEN}torchserve_cpp build is complete. To run unit test: \
12787
./_build/test/torchserve_cpp_test ${COLOR_OFF}"
@@ -207,6 +167,5 @@ cd $BASE_DIR
207167

208168
git submodule update --init --recursive
209169

210-
prepare_test_files
211170
build
212171
install_torchserve_cpp

cpp/src/backends/core/backend.cc

+2
Original file line numberDiff line numberDiff line change
@@ -22,12 +22,14 @@ bool Backend::Initialize(const std::string &model_dir) {
2222
// TODO: windows
2323
TS_LOGF(DEBUG, "Initializing from manifest: {}", manifest_file);
2424
if (!manifest_->Initialize(manifest_file)) {
25+
TS_LOGF(ERROR, "Could not initialize from manifest: {}", manifest_file);
2526
return false;
2627
}
2728

2829
LoadHandler(model_dir);
2930

3031
if (!handler_) {
32+
TS_LOG(ERROR, "Could not load handler");
3133
return false;
3234
}
3335

cpp/src/examples/CMakeLists.txt

+3-1
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,6 @@
11

2+
file(COPY ${CMAKE_CURRENT_SOURCE_DIR}/../../test/resources/ DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/../../test/resources/)
3+
24
add_subdirectory("../../../examples/cpp/babyllama/" "${CMAKE_CURRENT_BINARY_DIR}/../../test/resources/examples/babyllama/babyllama_handler/")
35

46
add_subdirectory("../../../examples/cpp/llamacpp/" "${CMAKE_CURRENT_BINARY_DIR}/../../test/resources/examples/llamacpp/llamacpp_handler/")
@@ -10,6 +12,6 @@ if(CMAKE_SYSTEM_NAME MATCHES "Linux")
1012
add_subdirectory("../../../examples/cpp/aot_inductor/llama2/" "${CMAKE_CURRENT_BINARY_DIR}/../../test/resources/examples/aot_inductor/llama_handler/")
1113

1214
add_subdirectory("../../../examples/cpp/aot_inductor/bert" "${CMAKE_CURRENT_BINARY_DIR}/../../test/resources/examples/aot_inductor/bert_handler/")
13-
15+
1416
add_subdirectory("../../../examples/cpp/aot_inductor/resnet" "${CMAKE_CURRENT_BINARY_DIR}/../../test/resources/examples/aot_inductor/resnet_handler/")
1517
endif()
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,13 @@
1+
2+
add_custom_command(
3+
OUTPUT bert-seq.so
4+
COMMAND TOKENIZERS_PARALLELISM=false python ${CMAKE_CURRENT_SOURCE_DIR}/aot_compile_export.py
5+
COMMAND cp ${CMAKE_CURRENT_BINARY_DIR}/Transformer_model/tokenizer.json ${CMAKE_CURRENT_BINARY_DIR}/
6+
DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/aot_compile_export.py
7+
)
8+
19
set(TOKENZIER_CPP_PATH ${CMAKE_CURRENT_SOURCE_DIR}/../../../../cpp/third-party/tokenizers-cpp)
210
add_subdirectory(${TOKENZIER_CPP_PATH} tokenizers EXCLUDE_FROM_ALL)
3-
add_library(bert_handler SHARED src/bert_handler.cc)
11+
add_library(bert_handler SHARED src/bert_handler.cc bert-seq.so)
412
target_include_directories(bert_handler PRIVATE ${TOKENZIER_CPP_PATH}/include)
513
target_link_libraries(bert_handler PRIVATE ts_backends_core ts_utils ${TORCH_LIBRARIES} tokenizers_cpp)
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,23 @@
1+
2+
FetchContent_Declare(
3+
stories15M_pt
4+
URL https://huggingface.co/karpathy/tinyllamas/resolve/main/stories15M.pt?download=true
5+
DOWNLOAD_NO_EXTRACT TRUE
6+
DOWNLOAD_DIR ${CMAKE_CURRENT_BINARY_DIR}/
7+
)
8+
9+
FetchContent_MakeAvailable(stories15M_pt)
10+
11+
12+
add_custom_command(
13+
OUTPUT stories15M.so
14+
COMMAND PYTHONPATH=${CMAKE_CURRENT_SOURCE_DIR}/../../../../cpp/third-party/llama2.so/ python ${CMAKE_CURRENT_SOURCE_DIR}/compile.py --checkpoint ${CMAKE_CURRENT_BINARY_DIR}/\'stories15M.pt?download=true\' ${CMAKE_CURRENT_BINARY_DIR}/stories15M.so
15+
DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/compile.py
16+
)
17+
18+
119
add_library(llama2_so STATIC ../../../../cpp/third-party/llama2.so/run.cpp)
220
target_compile_options(llama2_so PRIVATE -Wall -Wextra -Ofast -fpermissive)
321

4-
add_library(llama_so_handler SHARED src/llama_handler.cc)
22+
add_library(llama_so_handler SHARED src/llama_handler.cc stories15M.so)
523
target_link_libraries(llama_so_handler PRIVATE llama2_so ts_backends_core ts_utils ${TORCH_LIBRARIES})
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,9 @@
1-
add_library(resnet_handler SHARED src/resnet_handler.cc)
1+
2+
add_custom_command(
3+
OUTPUT resnet50_pt2.so
4+
COMMAND python ${CMAKE_CURRENT_SOURCE_DIR}/resnet50_torch_export.py
5+
DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/resnet50_torch_export.py
6+
)
7+
8+
add_library(resnet_handler SHARED src/resnet_handler.cc resnet50_pt2.so)
29
target_link_libraries(resnet_handler PRIVATE ts_backends_core ts_utils ${TORCH_LIBRARIES})

examples/cpp/babyllama/CMakeLists.txt

+17
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,20 @@
1+
include(FetchContent)
2+
3+
FetchContent_Declare(
4+
stories15M_bin
5+
URL https://huggingface.co/karpathy/tinyllamas/resolve/main/stories15M.bin
6+
DOWNLOAD_NO_EXTRACT TRUE
7+
DOWNLOAD_DIR ${CMAKE_CURRENT_BINARY_DIR}/
8+
)
9+
10+
FetchContent_Declare(
11+
tokenizer_bin
12+
URL https://github.com/karpathy/llama2.c/raw/master/tokenizer.bin
13+
DOWNLOAD_NO_EXTRACT TRUE
14+
DOWNLOAD_DIR ${CMAKE_CURRENT_BINARY_DIR}/
15+
)
16+
17+
FetchContent_MakeAvailable(tokenizer_bin stories15M_bin)
118

219
add_library(llama2_c STATIC ../../../cpp/third-party/llama2.c/run.c)
320
target_compile_options(llama2_c PRIVATE -Wall -Wextra -Ofast -fPIC)

0 commit comments

Comments
 (0)