cmake_minimum_required(VERSION 3.19)

project(llama_runner)

# Duplicating options as root CMakeLists.txt
option(EXECUTORCH_BUILD_OPTIMIZED "Build the optimized kernels" OFF)

include(CMakeDependentOption)
#
# pthreadpool: build pthreadpool library. Disable on unsupported platforms
# (forced OFF when targeting bare-metal ARM, where threads are unavailable).
#
cmake_dependent_option(
  EXECUTORCH_BUILD_PTHREADPOOL "Build pthreadpool library." ON
  "NOT EXECUTORCH_BUILD_ARM_BAREMETAL" OFF
)
#
# cpuinfo: build cpuinfo library. Disable on unsupported platforms
# (forced OFF when targeting bare-metal ARM).
#
cmake_dependent_option(
  EXECUTORCH_BUILD_CPUINFO "Build cpuinfo library." ON
  "NOT EXECUTORCH_BUILD_ARM_BAREMETAL" OFF
)

# Fall back to a plain `python3` interpreter when the caller did not provide one.
if(NOT PYTHON_EXECUTABLE)
  set(PYTHON_EXECUTABLE python3)
endif()
set(_common_compile_options -Wno-deprecated-declarations -fPIC)

# Let files say "include <executorch/path/to/header.h>".
set(_common_include_directories ${EXECUTORCH_ROOT}/..)

# For some reason android build is not able to find where gflags is and hence
# cannot find corresponding .cmake file
set(gflags_DIR ${CMAKE_CURRENT_BINARY_DIR}/../../../third-party/gflags)
find_package(gflags REQUIRED)

#
# llama_main: test binary to run llama, with tokenizer and sampler integrated
#

# find `executorch` libraries: same trick as for gflags above — point the
# package search at the build tree's install layout.
set(executorch_DIR ${CMAKE_CURRENT_BINARY_DIR}/../../../lib/cmake/ExecuTorch)
find_package(executorch CONFIG REQUIRED)
if(CMAKE_TOOLCHAIN_IOS OR ANDROID)
  target_link_options_shared_lib(executorch)
endif()

# custom ops library
if(EXECUTORCH_BUILD_CUSTOM)
  add_subdirectory(custom_ops)
endif()

# llama_runner library
add_subdirectory(runner)

# Accumulate link targets and sources; options below append to both.
set(link_libraries)
set(_srcs main.cpp)

if(EXECUTORCH_BUILD_OPTIMIZED)
  list(
    APPEND
    link_libraries
    optimized_native_cpu_ops_lib
    optimized_kernels
    portable_kernels
    cpublas
    eigen_blas
  )
  target_link_options_shared_lib(optimized_native_cpu_ops_lib)
else()
  list(APPEND link_libraries portable_ops_lib portable_kernels)
  target_link_options_shared_lib(portable_ops_lib)
endif()

if(EXECUTORCH_BUILD_CUSTOM)
  target_link_options_shared_lib(custom_ops_lib)
  list(APPEND link_libraries custom_ops_lib)
endif()

# Extra compile option, sources, and include dir for pthreadpool
if(EXECUTORCH_BUILD_PTHREADPOOL)
  list(APPEND _common_compile_options -DET_USE_THREADPOOL)
  list(APPEND link_libraries pthreadpool)
  list(
    APPEND
    _srcs
    ${CMAKE_CURRENT_SOURCE_DIR}/../../../backends/xnnpack/threadpool/threadpool.cpp
    ${CMAKE_CURRENT_SOURCE_DIR}/../../../backends/xnnpack/threadpool/threadpool_guard.cpp
  )
  list(APPEND _common_include_directories
       ${CMAKE_CURRENT_SOURCE_DIR}/../../../backends/xnnpack/third-party/pthreadpool/include
  )
endif()

# Extra sources for cpuinfo
if(EXECUTORCH_BUILD_CPUINFO)
  list(APPEND link_libraries cpuinfo)
  list(APPEND _srcs
       ${CMAKE_CURRENT_SOURCE_DIR}/../../../backends/xnnpack/threadpool/cpuinfo_utils.cpp
  )
  list(APPEND _common_include_directories
       ${CMAKE_CURRENT_SOURCE_DIR}/../../../backends/xnnpack/third-party/cpuinfo/include
  )
endif()

# XNNPACK
if(TARGET xnnpack_backend)
  set(xnnpack_backend_libs xnnpack_backend XNNPACK)
  list(APPEND link_libraries ${xnnpack_backend_libs})
  target_link_options_shared_lib(xnnpack_backend)
endif()
if(TARGET qnn_executorch_backend)
  target_link_options_shared_lib(qnn_executorch_backend)
endif()

# This one is needed for cpuinfo where it uses android specific log lib
if(ANDROID)
  list(APPEND link_libraries log)
endif()

add_executable(llama_main ${_srcs})
# BUG FIX: was `if(CMAKE_BUILD_TYPE EQUAL "RELEASE")`. `EQUAL` is a numeric
# comparison and is always false for non-numeric strings, and build types are
# conventionally spelled "Release", so --gc-sections was never applied. Use a
# string comparison against the conventional value.
# NOTE(review): CMAKE_BUILD_TYPE is empty under multi-config generators
# (VS/Xcode); a $<CONFIG:Release> generator expression would be more robust.
if(CMAKE_BUILD_TYPE STREQUAL "Release")
  target_link_options(llama_main PRIVATE "LINKER:--gc-sections")
endif()

target_include_directories(llama_main PUBLIC ${_common_include_directories})
target_link_libraries(llama_main PUBLIC gflags llama_runner ${link_libraries})
target_compile_options(llama_main PUBLIC ${_common_compile_options})
127
175
if (APPLE )
128
176
target_link_options_shared_lib(executorch)
0 commit comments