
Commit 234a566

cleanup code
snnn committed Nov 13, 2024
1 parent 6d7603f commit 234a566
Showing 190 changed files with 5 additions and 10,815 deletions.
54 changes: 0 additions & 54 deletions cmake/CMakeLists.txt
@@ -102,7 +102,6 @@ option(onnxruntime_BUILD_CSHARP "Build C# library" OFF)
option(onnxruntime_BUILD_OBJC "Build Objective-C library" OFF)
option(onnxruntime_USE_PREINSTALLED_EIGEN "Use pre-installed EIGEN. Need to provide eigen_SOURCE_PATH if turn this on." OFF)
option(onnxruntime_BUILD_BENCHMARKS "Build ONNXRuntime micro-benchmarks" OFF)
option(onnxruntime_USE_LLVM "Build TVM with LLVM" OFF)
option(onnxruntime_USE_VSINPU "Build with VSINPU support" OFF)

cmake_dependent_option(onnxruntime_USE_FLASH_ATTENTION "Build flash attention kernel for scaled dot product attention" ON "onnxruntime_USE_CUDA" OFF)
@@ -145,10 +144,6 @@ option(onnxruntime_USE_TELEMETRY "Build with Telemetry" OFF)
cmake_dependent_option(onnxruntime_USE_MIMALLOC "Override new/delete and arena allocator with mimalloc" OFF "WIN32;NOT onnxruntime_USE_CUDA;NOT onnxruntime_USE_OPENVINO" OFF)
option(onnxruntime_USE_CANN "Build with CANN support" OFF)
option(onnxruntime_USE_ROCM "Build with AMD GPU support" OFF)
option(onnxruntime_USE_TVM "Build with TVM support" OFF)
option(onnxruntime_TVM_CUDA_RUNTIME "Build TVM with CUDA support" OFF)
option(onnxruntime_TVM_USE_LLVM "Build TVM with LLVM. Set customized path to llvm-config.exe here if need" OFF)
option(onnxruntime_TVM_USE_HASH "Build ipp-crypto library for support hash algorithm. It is defined for TVM only")
option(onnxruntime_USE_XNNPACK "Build with XNNPACK support. Provides an alternative math library on ARM, WebAssembly and x86." OFF)
option(onnxruntime_USE_WEBNN "Build with WebNN support. Enable hardware acceleration in web browsers." OFF)
option(onnxruntime_USE_WEBGPU "Build with WebGPU support. Enable WebGPU via C/C++ interface." OFF)
@@ -906,11 +901,6 @@ if (onnxruntime_USE_SNPE)
list(APPEND ONNXRUNTIME_PROVIDER_NAMES snpe)
list(APPEND ORT_PROVIDER_CMAKE_FLAGS -Donnxruntime_USE_SNPE=1)
endif()
if (onnxruntime_USE_TVM)
list(APPEND ORT_PROVIDER_FLAGS -DUSE_TVM=1)
list(APPEND ORT_PROVIDER_CMAKE_FLAGS -Donnxruntime_USE_TVM=1)
list(APPEND ONNXRUNTIME_PROVIDER_NAMES tvm)
endif()
if (onnxruntime_USE_WINML)
list(APPEND ORT_PROVIDER_FLAGS -DUSE_WINML=1)
list(APPEND ORT_PROVIDER_CMAKE_FLAGS -Donnxruntime_USE_WINML=1)
@@ -1313,50 +1303,6 @@ if (onnxruntime_USE_DNNL)
add_compile_definitions(DNNL_OPENMP)
endif()

# TVM EP
if (onnxruntime_USE_TVM)
if (NOT TARGET tvm)
message(STATUS "Include TVM(*).")
include(tvm)
endif()

# ipp-crypto
if (onnxruntime_TVM_USE_HASH)
message(STATUS "Include ipp-crypto(*).")
include(ipp-crypto)
endif()

# TVM
if (onnxruntime_TVM_USE_LLVM)
set(USE_LLVM "${onnxruntime_TVM_USE_LLVM}" CACHE STRING "Path to LLVM for correct TVM build")
elseif(onnxruntime_USE_LLVM)
set(USE_LLVM ON CACHE BOOL "Only defined for TVM")
endif()

if (onnxruntime_TVM_CUDA_RUNTIME)
set(USE_CUDA ON CACHE BOOL "Only defined for TVM" FORCE)
endif()

# TODO(vvchernov): customized tvm logger is hidden due to the issue on TVM side (https://github.com/apache/tvm/issues/10139)
# add_compile_definitions(TVM_LOG_CUSTOMIZE=1)
# add_library(tvm_custom_logger STATIC ${ONNXRUNTIME_ROOT}/core/providers/tvm/custom_logging.cc)

set(USE_OPENMP gnu CACHE STRING "Only defined for TVM")
add_subdirectory(${tvm_SOURCE_DIR} ${tvm_BINARY_DIR} EXCLUDE_FROM_ALL)

set_target_properties(tvm PROPERTIES FOLDER ${tvm_SOURCE_DIR})
# target_link_libraries(tvm PUBLIC tvm_custom_logger)

set(TVM_INCLUDES ${tvm_SOURCE_DIR}/include
${tvm_SOURCE_DIR}/3rdparty/dmlc-core/include
${tvm_SOURCE_DIR}/3rdparty/dlpack/include
$<TARGET_PROPERTY:tvm,INTERFACE_INCLUDE_DIRECTORIES>)

set(onnxruntime_tvm_libs onnxruntime_providers_tvm)
list(APPEND onnxruntime_EXTERNAL_LIBRARIES tvm)
list(APPEND onnxruntime_EXTERNAL_DEPENDENCIES tvm)
endif()

# onnxruntime-extensions
if (onnxruntime_USE_EXTENSIONS)
include(extensions)
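
For context, the TVM block deleted above followed the same provider-registration pattern that the surviving providers in this file (for example SNPE and WinML) still use. A minimal sketch of that pattern, using a hypothetical provider name `foo` rather than any real option in the tree:

    if (onnxruntime_USE_FOO)
      # compile definition visible to the C++ sources
      list(APPEND ORT_PROVIDER_FLAGS -DUSE_FOO=1)
      # flag forwarded to dependent CMake projects
      list(APPEND ORT_PROVIDER_CMAKE_FLAGS -Donnxruntime_USE_FOO=1)
      # provider name recorded for later per-provider handling
      list(APPEND ONNXRUNTIME_PROVIDER_NAMES foo)
    endif()

Removing the onnxruntime_USE_TVM branch of this pattern is what detaches the TVM execution provider from the top-level build.
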
1 change: 0 additions & 1 deletion cmake/onnxruntime.cmake
@@ -221,7 +221,6 @@ set(onnxruntime_INTERNAL_LIBRARIES
${onnxruntime_winml}
onnxruntime_optimizer
onnxruntime_providers
${onnxruntime_tvm_libs}
onnxruntime_lora
onnxruntime_framework
onnxruntime_graph
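
onnxruntime_INTERNAL_LIBRARIES is the list of static libraries folded into the single onnxruntime shared library, so dropping the entry here is what removes the TVM provider objects from the shipped binary. A hedged sketch of how a provider hooks into that list, with a hypothetical `foo` provider and the list shape taken from the context lines above:

    set(onnxruntime_INTERNAL_LIBRARIES
      onnxruntime_optimizer
      onnxruntime_providers
      ${onnxruntime_foo_libs}   # empty unless onnxruntime_USE_FOO is ON; then onnxruntime_providers_foo
      onnxruntime_framework
      onnxruntime_graph
    )
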
25 changes: 0 additions & 25 deletions cmake/onnxruntime_codegen_tvm.cmake

This file was deleted.

7 changes: 0 additions & 7 deletions cmake/onnxruntime_providers.cmake
@@ -101,9 +101,6 @@ endif()
if(onnxruntime_USE_ROCM)
set(PROVIDERS_ROCM onnxruntime_providers_rocm)
endif()
if (onnxruntime_USE_TVM)
set(PROVIDERS_TVM onnxruntime_providers_tvm)
endif()
if (onnxruntime_USE_XNNPACK)
set(PROVIDERS_XNNPACK onnxruntime_providers_xnnpack)
endif()
@@ -194,10 +191,6 @@ if (onnxruntime_USE_ROCM)
include(onnxruntime_providers_rocm.cmake)
endif()

if (onnxruntime_USE_TVM)
include(onnxruntime_providers_tvm.cmake)
endif()

if (onnxruntime_USE_VSINPU)
include(onnxruntime_providers_vsinpu.cmake)
endif()
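
The two removals above are the halves of the conditional include pattern onnxruntime_providers.cmake uses for every execution provider: first a PROVIDERS_<EP> variable naming the target, then an include() of the per-provider .cmake file that defines it. A sketch with a hypothetical provider, mirroring the surviving ROCm and XNNPACK lines:

    if (onnxruntime_USE_FOO)
      set(PROVIDERS_FOO onnxruntime_providers_foo)   # target name other .cmake files link against
    endif()

    if (onnxruntime_USE_FOO)
      include(onnxruntime_providers_foo.cmake)       # defines the onnxruntime_providers_foo target
    endif()
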
64 changes: 0 additions & 64 deletions cmake/onnxruntime_providers_tvm.cmake

This file was deleted.

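The deleted file itself is not shown, but from the references removed elsewhere in this commit it defined the onnxruntime_providers_tvm static library target. Purely as an illustration of what an onnxruntime_providers_<ep>.cmake file of this kind typically contains (not the deleted contents; `foo` and the paths are placeholders):

    file(GLOB_RECURSE onnxruntime_providers_foo_cc_srcs CONFIGURE_DEPENDS
      "${ONNXRUNTIME_ROOT}/core/providers/foo/*.h"
      "${ONNXRUNTIME_ROOT}/core/providers/foo/*.cc"
    )
    # build the provider as a static library that the core library links in
    add_library(onnxruntime_providers_foo STATIC ${onnxruntime_providers_foo_cc_srcs})
    target_include_directories(onnxruntime_providers_foo PRIVATE ${ONNXRUNTIME_ROOT})
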
33 changes: 0 additions & 33 deletions cmake/onnxruntime_python.cmake
@@ -169,7 +169,6 @@ endif()
target_link_libraries(onnxruntime_pybind11_state PRIVATE
onnxruntime_session
${onnxruntime_libs}
${PROVIDERS_TVM}
${PROVIDERS_NNAPI}
${PROVIDERS_XNNPACK}
${PROVIDERS_COREML}
@@ -184,7 +183,6 @@ target_link_libraries(onnxruntime_pybind11_state PRIVATE
onnxruntime_optimizer
onnxruntime_providers
onnxruntime_util
${onnxruntime_tvm_libs}
onnxruntime_lora
onnxruntime_framework
onnxruntime_util
@@ -965,37 +963,6 @@ if (onnxruntime_USE_ROCM)
)
endif()

if (onnxruntime_USE_TVM)
file(GLOB onnxruntime_python_providers_tvm_srcs CONFIGURE_DEPENDS
"${ONNXRUNTIME_ROOT}/python/providers/tvm/*.py"
)
add_custom_command(
TARGET onnxruntime_pybind11_state POST_BUILD
COMMAND ${CMAKE_COMMAND} -E make_directory $<TARGET_FILE_DIR:${build_output_target}>/onnxruntime/providers
COMMAND ${CMAKE_COMMAND} -E make_directory $<TARGET_FILE_DIR:${build_output_target}>/onnxruntime/providers/tvm
COMMAND ${CMAKE_COMMAND} -E copy
${onnxruntime_python_providers_tvm_srcs}
$<TARGET_FILE_DIR:${build_output_target}>/onnxruntime/providers/tvm
COMMAND ${CMAKE_COMMAND} -E copy
$<TARGET_FILE:onnxruntime_providers_tvm>
$<TARGET_FILE_DIR:${build_output_target}>/onnxruntime/capi/
)

add_custom_command(
TARGET onnxruntime_pybind11_state POST_BUILD
WORKING_DIRECTORY ${tvm_SOURCE_DIR}/python
COMMAND ${Python_EXECUTABLE} setup.py bdist_wheel
)

add_custom_command(
TARGET onnxruntime_pybind11_state POST_BUILD
COMMAND ${Python_EXECUTABLE}
$<TARGET_FILE_DIR:${build_output_target}>/onnxruntime/providers/tvm/extend_python_file.py
--target_file $<TARGET_FILE_DIR:${build_output_target}>/onnxruntime/capi/_ld_preload.py
)

endif()

if (onnxruntime_USE_DML)
if (NOT onnxruntime_USE_CUSTOM_DIRECTML)
set(dml_shared_lib_path ${DML_PACKAGE_DIR}/bin/${onnxruntime_target_platform}-win/${DML_SHARED_LIB})
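
The custom commands deleted above staged the TVM Python sources and the provider's shared library into the Python package layout after the pybind module was built, and then built the TVM wheel itself. A reduced sketch of that POST_BUILD staging pattern (the `foo` provider and its source list are placeholders, not part of the commit):

    add_custom_command(
      TARGET onnxruntime_pybind11_state POST_BUILD
      # create the package sub-directory inside the build output
      COMMAND ${CMAKE_COMMAND} -E make_directory
              $<TARGET_FILE_DIR:${build_output_target}>/onnxruntime/providers/foo
      # copy the provider's Python sources next to the compiled module
      COMMAND ${CMAKE_COMMAND} -E copy
              ${onnxruntime_python_providers_foo_srcs}
              $<TARGET_FILE_DIR:${build_output_target}>/onnxruntime/providers/foo
    )
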
15 changes: 0 additions & 15 deletions cmake/onnxruntime_unittests.cmake
@@ -9,9 +9,6 @@ set(TEST_INC_DIR ${ONNXRUNTIME_ROOT})
if (onnxruntime_ENABLE_TRAINING)
list(APPEND TEST_INC_DIR ${ORTTRAINING_ROOT})
endif()
if (onnxruntime_USE_TVM)
list(APPEND TEST_INC_DIR ${TVM_INCLUDES})
endif()

set(disabled_warnings)
function(AddTest)
@@ -111,7 +108,6 @@ function(AddTest)
endif()
target_compile_options(${_UT_TARGET} PRIVATE ${disabled_warnings})
else()
target_compile_options(${_UT_TARGET} PRIVATE ${DISABLED_WARNINGS_FOR_TVM})
target_compile_options(${_UT_TARGET} PRIVATE "$<$<COMPILE_LANGUAGE:CUDA>:SHELL:--compiler-options -Wno-error=sign-compare>"
"$<$<NOT:$<COMPILE_LANGUAGE:CUDA>>:-Wno-error=sign-compare>")
if (${HAS_NOERROR})
@@ -641,13 +637,11 @@ set(ONNXRUNTIME_TEST_LIBS
${PROVIDERS_ACL}
${PROVIDERS_ARMNN}
${PROVIDERS_COREML}
# ${PROVIDERS_TVM}
${PROVIDERS_XNNPACK}
${PROVIDERS_AZURE}
onnxruntime_optimizer
onnxruntime_providers
onnxruntime_util
${onnxruntime_tvm_libs}
onnxruntime_lora
onnxruntime_framework
onnxruntime_util
@@ -749,12 +743,6 @@ if(onnxruntime_USE_AZURE)
list(APPEND onnxruntime_test_providers_libs onnxruntime_providers_azure)
endif()

if(WIN32)
if (onnxruntime_USE_TVM)
list(APPEND disabled_warnings ${DISABLED_WARNINGS_FOR_TVM})
endif()
endif()

file(GLOB onnxruntime_test_framework_src CONFIGURE_DEPENDS
${onnxruntime_test_framework_src_patterns}
)
@@ -855,9 +843,6 @@ if (onnxruntime_ENABLE_TRAINING_APIS)
list(APPEND all_tests ${onnxruntime_test_training_api_src})
endif()

if (onnxruntime_USE_TVM)
list(APPEND all_tests ${onnxruntime_test_tvm_src})
endif()

if (onnxruntime_USE_OPENVINO)
list(APPEND all_tests ${onnxruntime_test_openvino_src})
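
Taken together, the unittest edits unhook TVM from four places in the test build: the extra include directories, the TVM-specific warning suppressions, the test link-library list, and the list of test sources. Compressed into one hedged sketch for a hypothetical provider (variable names follow the surviving context above; FOO_INCLUDES and the test source list are illustrative):

    if (onnxruntime_USE_FOO)
      list(APPEND TEST_INC_DIR ${FOO_INCLUDES})            # provider headers needed by the tests
      list(APPEND all_tests ${onnxruntime_test_foo_src})   # provider-specific test sources
      list(APPEND disabled_warnings ${DISABLED_WARNINGS_FOR_FOO})
    endif()
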
(Diffs for the remaining changed files are not shown here.)
