diff --git a/CMakeLists.txt b/CMakeLists.txt index 57b6aed..2a959e2 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -7,8 +7,7 @@ cmake_minimum_required(VERSION 3.15) # Shall the library be built as a shared library? option(BUILD_SHARED_LIBS "Build the library as a shared library" ON) option(ANIRA_WITH_BENCHMARK "Build the library with benchmarking capabilities" OFF) -option(ANIRA_WITH_EXAMPLES "Add Example Targets for desktop plattforms (juce plugin, benchmarks, minimal inference and model examples)" OFF) -option(ANIRA_WITH_BELA_EXAMPLE "Add Example Targets for bela plattform (includes a model)" OFF) +option(ANIRA_WITH_EXAMPLES "Add example targets (juce plugin, benchmarks, minimal inference and model examples)" OFF) option(ANIRA_WITH_INSTALL "Add install targets" OFF) option(ANIRA_WITH_TESTS "Add Build Tests" OFF) @@ -62,6 +61,14 @@ if(APPLE) endif() endif() +if(APPLE) + set(ANIRA_OPERATING_SYSTEM "macOS") +elseif(UNIX) + set(ANIRA_OPERATING_SYSTEM "Linux") +elseif(WIN32) + set(ANIRA_OPERATING_SYSTEM "Windows") +endif() + # Print the processor architecture selected for build. Defaults to CMAKE_HOST_SYSTEM_PROCESSOR when no crosscompile tolchain is defined. CMAKE_HOST_SYSTEM_PROCESSOR is only defined after the project() call. For OSX this value can be overwritten by the CMAKE_OSX_ARCHITECTURES. message(STATUS "Building ${PROJECT_NAME} for ${CMAKE_SYSTEM_NAME} on ${CMAKE_SYSTEM_PROCESSOR}") @@ -198,9 +205,13 @@ if(ANIRA_WITH_LIBTORCH) # The find_package(Torch) adds the libraries libc10.so and libkineto.a as full paths to ${TORCH_LIBRARIES}. This is no problem when we add anira as a subdirectory to another project, but when we install the library, the torch libraries will be link targets of the anira library with full paths and hence not found on other systems. Therefore, we link those libs privately and only add the torch target publicly. 
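As the comment above describes, only the imported torch target is meant to be linked publicly, while the full-path libraries that find_package(Torch) puts into ${TORCH_LIBRARIES} (libc10.so, libkineto.a, ...) stay private so they do not end up as absolute-path link targets in the installed export set. A minimal standalone sketch of that pattern — not lines from this patch, with anira_demo as a placeholder target and source file:

    # Sketch only: split ${TORCH_LIBRARIES} into exported imported targets and private full paths.
    find_package(Torch REQUIRED)
    add_library(anira_demo SHARED demo.cpp)  # placeholder target and source

    foreach(TORCH_LIB ${TORCH_LIBRARIES})
        if(TORCH_LIB STREQUAL "torch" OR TORCH_LIB STREQUAL "torch_library")
            # Imported targets are safe to expose; consumers resolve them through their own find_package(Torch).
            target_link_libraries(anira_demo PUBLIC ${TORCH_LIB})
        else()
            # Absolute library paths must not leak into the install interface, so keep them private.
            target_link_libraries(anira_demo PRIVATE ${TORCH_LIB})
        endif()
    endforeach()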
# Also until cmake 3.26, there is a bug where the torch_cpu library is not found when linking publicly https://gitlab.kitware.com/cmake/cmake/-/issues/24163 and anira is added as a subdirectory to another project, see # But this is necessary for when we install the library since otherwise symbols are not found + # Another problem is that on armv7l with benchmarking enabled, some symbols are not found when linking the torch_cpu library privately if (CMAKE_VERSION VERSION_LESS "3.26.0" AND NOT (CMAKE_SOURCE_DIR STREQUAL CMAKE_CURRENT_SOURCE_DIR)) target_link_libraries(${PROJECT_NAME} PRIVATE ${TORCH_LIBRARIES}) set(TORCH_LIBRARIES_ALL_PRIVATE TRUE) + if(UNIX AND NOT APPLE AND CMAKE_SYSTEM_PROCESSOR STREQUAL "armv7l") + target_link_libraries(${PROJECT_NAME} PUBLIC torch_cpu) + endif() else() foreach(TORCH_LIB ${TORCH_LIBRARIES}) if(TORCH_LIB STREQUAL "torch" OR TORCH_LIB STREQUAL "torch_library") @@ -250,11 +261,11 @@ endif() # ============================================================================== # First we import the extras folder since we need the compile definitions (model paths) for the examples -if(ANIRA_WITH_EXAMPLES OR ANIRA_WITH_BELA_EXAMPLE OR ANIRA_WITH_TESTS) +if(ANIRA_WITH_EXAMPLES OR ANIRA_WITH_TESTS) add_subdirectory(extras) endif() -if(ANIRA_WITH_EXAMPLES OR ANIRA_WITH_BELA_EXAMPLE) +if(ANIRA_WITH_EXAMPLES) add_subdirectory(examples) endif() diff --git a/README.md b/README.md index ca13d17..b85a21e 100644 --- a/README.md +++ b/README.md @@ -123,7 +123,6 @@ Moreover, the following options are available: - Build anira with benchmark capabilities: `-DANIRA_WITH_BENCHMARK=ON` - Build example applications, plugins and populate example neural models: `-DANIRA_WITH_EXAMPLES=ON` -- Build a Bela example application: `-DANIRA_WITH_BELA_EXAMPLE=ON` - Build anira with tests: `-DANIRA_WITH_TESTS=ON` ## Documentation @@ -139,12 +138,10 @@ anira allows users to benchmark and compare the inference performance of differe ### Build in examples -- [Simple JUCE Audio Plugin](examples/desktop/juce-audio-plugin/): Demonstrates how to use anira in a real-time audio JUCE / VST3-Plugin. -- [CLAP Plugin Example](examples/desktop/clap-audio-plugin/): Demonstrates how to use anira in a real-time clap plugin. -- [Benchmark](examples/desktop/benchmark/): Demonstrates how to use anira for benchmarking of different neural network models, backends and audio configurations. -- [Minimal Inference](examples/desktop/minimal-inference/): Demonstrates how minimal inference applications can be implemented in all three backends. -- [Bela Example](examples/embedded/bela/bela-inference/): Demonstrates how to use anira in a real-time audio application on the Bela platform. -- [Bela Benchmark](examples/embedded/bela/bela-benchmark/): Demonstrates how to use anira for benchmarking on the Bela platform. +- [Simple JUCE Audio Plugin](examples/juce-audio-plugin/): Demonstrates how to use anira in a real-time audio JUCE / VST3-Plugin. +- [CLAP Plugin Example](examples/clap-audio-plugin/): Demonstrates how to use anira in a real-time clap plugin. +- [Benchmark](examples/benchmark/): Demonstrates how to use anira for benchmarking of different neural network models, backends and audio configurations. +- [Minimal Inference](examples/minimal-inference/): Demonstrates how minimal inference applications can be implemented in all three backends.
### Other examples diff --git a/cmake/SetupLibTorch.cmake b/cmake/SetupLibTorch.cmake index 0ff32ea..49ffc26 100644 --- a/cmake/SetupLibTorch.cmake +++ b/cmake/SetupLibTorch.cmake @@ -16,7 +16,7 @@ else() endif() option(LIBTORCH_ROOTDIR "libtorch root dir") -set(LIBTORCH_DIR_NAME "libtorch-${LIBTORCH_VERSION}${TORCH_BUILD_TYPE}") +set(LIBTORCH_DIR_NAME "libtorch-${LIBTORCH_VERSION}${TORCH_BUILD_TYPE}-${ANIRA_OPERATING_SYSTEM}-${CMAKE_SYSTEM_PROCESSOR}") set(LIBTORCH_ROOTDIR ${CMAKE_CURRENT_SOURCE_DIR}/modules/${LIBTORCH_DIR_NAME}) if(EXISTS ${LIBTORCH_ROOTDIR}/) diff --git a/cmake/SetupOnnxRuntime.cmake b/cmake/SetupOnnxRuntime.cmake index fb0ff9d..dce4ed2 100644 --- a/cmake/SetupOnnxRuntime.cmake +++ b/cmake/SetupOnnxRuntime.cmake @@ -1,7 +1,7 @@ set(LIBONNXRUNTIME_VERSION 1.19.2) option(ONNXRUNTIME_ROOTDIR "onnxruntime root dir") -set(ONNXRUNTIME_DIR_NAME "onnxruntime-${LIBONNXRUNTIME_VERSION}") +set(ONNXRUNTIME_DIR_NAME "onnxruntime-${LIBONNXRUNTIME_VERSION}-${ANIRA_OPERATING_SYSTEM}-${CMAKE_SYSTEM_PROCESSOR}") set(ONNXRUNTIME_ROOTDIR ${CMAKE_CURRENT_SOURCE_DIR}/modules/${ONNXRUNTIME_DIR_NAME}) if(EXISTS ${ONNXRUNTIME_ROOTDIR}/) diff --git a/cmake/SetupTensorflowLite.cmake b/cmake/SetupTensorflowLite.cmake index c1a1a61..2e6f5e8 100644 --- a/cmake/SetupTensorflowLite.cmake +++ b/cmake/SetupTensorflowLite.cmake @@ -5,7 +5,7 @@ else() endif() option(TENSORFLOWLITE_ROOTDIR "tensorflowlite root dir") -set(TENSORFLOWLITE_DIR_NAME "tensorflowlite-${LIBTENSORFLOWLITE_VERSION}") +set(TENSORFLOWLITE_DIR_NAME "tensorflowlite-${LIBTENSORFLOWLITE_VERSION}-${ANIRA_OPERATING_SYSTEM}-${CMAKE_SYSTEM_PROCESSOR}") set(TENSORFLOWLITE_ROOTDIR ${CMAKE_CURRENT_SOURCE_DIR}/modules/${TENSORFLOWLITE_DIR_NAME}) if(EXISTS ${TENSORFLOWLITE_ROOTDIR}/) diff --git a/cmake/Toolchain_Bela.cmake b/cmake/Toolchain_Bela.cmake deleted file mode 100644 index 398a9a7..0000000 --- a/cmake/Toolchain_Bela.cmake +++ /dev/null @@ -1,13 +0,0 @@ -# targets -set(CMAKE_SYSTEM_NAME Linux) -set(CMAKE_SYSTEM_PROCESSOR armv7l) - -# compiler settings -set(CMAKE_C_COMPILER /usr/bin/arm-linux-gnueabihf-gcc) -set(CMAKE_CXX_COMPILER /usr/bin/arm-linux-gnueabihf-g++) - -# give an option to distinguish -set(BELA TRUE) - -set(CMAKE_SYSROOT /sysroot) -set(BELA_ROOT "${CMAKE_SYSROOT}/root/Bela") \ No newline at end of file diff --git a/cmake/install.cmake b/cmake/install.cmake index beb4fe0..e931882 100644 --- a/cmake/install.cmake +++ b/cmake/install.cmake @@ -98,9 +98,15 @@ endif() # the other ones don't have cmake config files so we have to install them manually if(ANIRA_WITH_ONNXRUNTIME) - install(DIRECTORY "${ONNXRUNTIME_ROOTDIR}/include/" - DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}" - ) + if(UNIX AND NOT APPLE AND CMAKE_SYSTEM_PROCESSOR STREQUAL "armv7l") + install(DIRECTORY "${ONNXRUNTIME_ROOTDIR}/include/onnxruntime/" + DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}" + ) + else() + install(DIRECTORY "${ONNXRUNTIME_ROOTDIR}/include/" + DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}" + ) + endif() install(DIRECTORY "${ONNXRUNTIME_ROOTDIR}/lib/" DESTINATION "${CMAKE_INSTALL_LIBDIR}" ) diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt index 8853740..134b57d 100644 --- a/examples/CMakeLists.txt +++ b/examples/CMakeLists.txt @@ -1,5 +1,18 @@ -if(ANIRA_WITH_EXAMPLES) - add_subdirectory(desktop) -elseif(ANIRA_WITH_BELA_EXAMPLE) - add_subdirectory(embedded/bela) -endif() \ No newline at end of file +add_compile_definitions( + GUITARLSTM_MODELS_PATH_TENSORFLOW="${GUITARLSTM_MODELS_PATH_TENSORFLOW}" + 
GUITARLSTM_MODELS_PATH_PYTORCH="${GUITARLSTM_MODELS_PATH_PYTORCH}" + STEERABLENAFX_MODELS_PATH_TENSORFLOW="${STEERABLENAFX_MODELS_PATH_TENSORFLOW}" + STEERABLENAFX_MODELS_PATH_PYTORCH="${STEERABLENAFX_MODELS_PATH_PYTORCH}" + STATEFULLSTM_MODELS_PATH_TENSORFLOW="${STATEFULLSTM_MODELS_PATH_TENSORFLOW}" + STATEFULLSTM_MODELS_PATH_PYTORCH="${STATEFULLSTM_MODELS_PATH_PYTORCH}" + SIMPLEGAIN_MODEL_PATH="${SIMPLEGAIN_MODEL_PATH}" +) + + +if(ANIRA_WITH_BENCHMARK) + add_subdirectory(benchmark) +endif() + +add_subdirectory(minimal-inference) +add_subdirectory(juce-audio-plugin) +add_subdirectory(clap-audio-plugin) \ No newline at end of file diff --git a/examples/desktop/benchmark/CMakeLists.txt b/examples/benchmark/CMakeLists.txt similarity index 100% rename from examples/desktop/benchmark/CMakeLists.txt rename to examples/benchmark/CMakeLists.txt diff --git a/examples/desktop/benchmark/advanced-benchmark/CMakeLists.txt b/examples/benchmark/advanced-benchmark/CMakeLists.txt similarity index 100% rename from examples/desktop/benchmark/advanced-benchmark/CMakeLists.txt rename to examples/benchmark/advanced-benchmark/CMakeLists.txt diff --git a/examples/desktop/benchmark/advanced-benchmark/ClearCustomProcessor.h b/examples/benchmark/advanced-benchmark/ClearCustomProcessor.h similarity index 100% rename from examples/desktop/benchmark/advanced-benchmark/ClearCustomProcessor.h rename to examples/benchmark/advanced-benchmark/ClearCustomProcessor.h diff --git a/examples/desktop/benchmark/advanced-benchmark/defineAdvancedBenchmark.cpp b/examples/benchmark/advanced-benchmark/defineAdvancedBenchmark.cpp similarity index 100% rename from examples/desktop/benchmark/advanced-benchmark/defineAdvancedBenchmark.cpp rename to examples/benchmark/advanced-benchmark/defineAdvancedBenchmark.cpp diff --git a/examples/desktop/benchmark/advanced-benchmark/defineTestAdvancedBenchmark.cpp b/examples/benchmark/advanced-benchmark/defineTestAdvancedBenchmark.cpp similarity index 100% rename from examples/desktop/benchmark/advanced-benchmark/defineTestAdvancedBenchmark.cpp rename to examples/benchmark/advanced-benchmark/defineTestAdvancedBenchmark.cpp diff --git a/examples/desktop/benchmark/cnn-size-benchmark/CMakeLists.txt b/examples/benchmark/cnn-size-benchmark/CMakeLists.txt similarity index 100% rename from examples/desktop/benchmark/cnn-size-benchmark/CMakeLists.txt rename to examples/benchmark/cnn-size-benchmark/CMakeLists.txt diff --git a/examples/desktop/benchmark/cnn-size-benchmark/defineCNNSizeBenchmark.cpp b/examples/benchmark/cnn-size-benchmark/defineCNNSizeBenchmark.cpp similarity index 100% rename from examples/desktop/benchmark/cnn-size-benchmark/defineCNNSizeBenchmark.cpp rename to examples/benchmark/cnn-size-benchmark/defineCNNSizeBenchmark.cpp diff --git a/examples/desktop/benchmark/cnn-size-benchmark/defineTestCNNSizeBenchmark.cpp b/examples/benchmark/cnn-size-benchmark/defineTestCNNSizeBenchmark.cpp similarity index 100% rename from examples/desktop/benchmark/cnn-size-benchmark/defineTestCNNSizeBenchmark.cpp rename to examples/benchmark/cnn-size-benchmark/defineTestCNNSizeBenchmark.cpp diff --git a/examples/desktop/benchmark/simple-benchmark/CMakeLists.txt b/examples/benchmark/simple-benchmark/CMakeLists.txt similarity index 100% rename from examples/desktop/benchmark/simple-benchmark/CMakeLists.txt rename to examples/benchmark/simple-benchmark/CMakeLists.txt diff --git a/examples/desktop/benchmark/simple-benchmark/defineSimpleBenchmark.cpp b/examples/benchmark/simple-benchmark/defineSimpleBenchmark.cpp 
similarity index 100% rename from examples/desktop/benchmark/simple-benchmark/defineSimpleBenchmark.cpp rename to examples/benchmark/simple-benchmark/defineSimpleBenchmark.cpp diff --git a/examples/desktop/benchmark/simple-benchmark/defineTestSimpleBenchmark.cpp b/examples/benchmark/simple-benchmark/defineTestSimpleBenchmark.cpp similarity index 100% rename from examples/desktop/benchmark/simple-benchmark/defineTestSimpleBenchmark.cpp rename to examples/benchmark/simple-benchmark/defineTestSimpleBenchmark.cpp diff --git a/examples/desktop/clap-audio-plugin/CMakeLists.txt b/examples/clap-audio-plugin/CMakeLists.txt similarity index 100% rename from examples/desktop/clap-audio-plugin/CMakeLists.txt rename to examples/clap-audio-plugin/CMakeLists.txt diff --git a/examples/desktop/clap-audio-plugin/anira-clap-demo-pluginentry.cpp b/examples/clap-audio-plugin/anira-clap-demo-pluginentry.cpp similarity index 100% rename from examples/desktop/clap-audio-plugin/anira-clap-demo-pluginentry.cpp rename to examples/clap-audio-plugin/anira-clap-demo-pluginentry.cpp diff --git a/examples/desktop/clap-audio-plugin/anira-clap-demo.cpp b/examples/clap-audio-plugin/anira-clap-demo.cpp similarity index 100% rename from examples/desktop/clap-audio-plugin/anira-clap-demo.cpp rename to examples/clap-audio-plugin/anira-clap-demo.cpp diff --git a/examples/desktop/clap-audio-plugin/anira-clap-demo.h b/examples/clap-audio-plugin/anira-clap-demo.h similarity index 100% rename from examples/desktop/clap-audio-plugin/anira-clap-demo.h rename to examples/clap-audio-plugin/anira-clap-demo.h diff --git a/examples/desktop/clap-audio-plugin/cmake/anira-clap-demo.plist.in b/examples/clap-audio-plugin/cmake/anira-clap-demo.plist.in similarity index 100% rename from examples/desktop/clap-audio-plugin/cmake/anira-clap-demo.plist.in rename to examples/clap-audio-plugin/cmake/anira-clap-demo.plist.in diff --git a/examples/desktop/clap-audio-plugin/utils/DryWetMixer.cpp b/examples/clap-audio-plugin/utils/DryWetMixer.cpp similarity index 100% rename from examples/desktop/clap-audio-plugin/utils/DryWetMixer.cpp rename to examples/clap-audio-plugin/utils/DryWetMixer.cpp diff --git a/examples/desktop/clap-audio-plugin/utils/DryWetMixer.h b/examples/clap-audio-plugin/utils/DryWetMixer.h similarity index 100% rename from examples/desktop/clap-audio-plugin/utils/DryWetMixer.h rename to examples/clap-audio-plugin/utils/DryWetMixer.h diff --git a/examples/desktop/CMakeLists.txt b/examples/desktop/CMakeLists.txt deleted file mode 100644 index 134b57d..0000000 --- a/examples/desktop/CMakeLists.txt +++ /dev/null @@ -1,18 +0,0 @@ -add_compile_definitions( - GUITARLSTM_MODELS_PATH_TENSORFLOW="${GUITARLSTM_MODELS_PATH_TENSORFLOW}" - GUITARLSTM_MODELS_PATH_PYTORCH="${GUITARLSTM_MODELS_PATH_PYTORCH}" - STEERABLENAFX_MODELS_PATH_TENSORFLOW="${STEERABLENAFX_MODELS_PATH_TENSORFLOW}" - STEERABLENAFX_MODELS_PATH_PYTORCH="${STEERABLENAFX_MODELS_PATH_PYTORCH}" - STATEFULLSTM_MODELS_PATH_TENSORFLOW="${STATEFULLSTM_MODELS_PATH_TENSORFLOW}" - STATEFULLSTM_MODELS_PATH_PYTORCH="${STATEFULLSTM_MODELS_PATH_PYTORCH}" - SIMPLEGAIN_MODEL_PATH="${SIMPLEGAIN_MODEL_PATH}" -) - - -if(ANIRA_WITH_BENCHMARK) - add_subdirectory(benchmark) -endif() - -add_subdirectory(minimal-inference) -add_subdirectory(juce-audio-plugin) -add_subdirectory(clap-audio-plugin) \ No newline at end of file diff --git a/examples/embedded/bela/CMakeLists.txt b/examples/embedded/bela/CMakeLists.txt deleted file mode 100644 index 9b4fe0b..0000000 --- a/examples/embedded/bela/CMakeLists.txt +++ 
/dev/null @@ -1,9 +0,0 @@ -add_compile_definitions( - BELA_MODELS_PATH="/root/models" -) - -if(ANIRA_WITH_BENCHMARK) - add_subdirectory(bela-benchmark) -endif() - -add_subdirectory(bela-inference) \ No newline at end of file diff --git a/examples/embedded/bela/SimpleGainConfig.h b/examples/embedded/bela/SimpleGainConfig.h deleted file mode 100644 index e8685a0..0000000 --- a/examples/embedded/bela/SimpleGainConfig.h +++ /dev/null @@ -1,30 +0,0 @@ -#ifndef ANIRA_SIMPLEGAINCONFIG_H -#define ANIRA_SIMPLEGAINCONFIG_H - -#include - -static std::vector model_data_gain_config = { -#ifdef USE_LIBTORCH - {BELA_MODELS_PATH + std::string("/simple_gain_network_mono.pt"), anira::InferenceBackend::LIBTORCH}, -#endif -#ifdef USE_ONNXRUNTIME - {BELA_MODELS_PATH + std::string("/simple_gain_network_mono.onnx"), anira::InferenceBackend::ONNX}, -#endif -#ifdef USE_TFLITE - {BELA_MODELS_PATH + std::string("/simple_gain_network_mono.tflite"), anira::InferenceBackend::TFLITE}, -#endif -}; - -static std::vector tensor_shape_gain_config = { - {{{1, 1, 512}, {1}}, {{1, 1, 512}, {1}}}, // When no backend is specified, the tensor shape is seen as universal for all backends -}; - -static anira::InferenceConfig gain_config( - model_data_gain_config, - tensor_shape_gain_config, - 5.f, - 0, - 1 -); - -#endif //ANIRA_SIMPLEGAINCONFIG_H diff --git a/examples/embedded/bela/bela-benchmark/CMakeLists.txt b/examples/embedded/bela/bela-benchmark/CMakeLists.txt deleted file mode 100644 index fd78212..0000000 --- a/examples/embedded/bela/bela-benchmark/CMakeLists.txt +++ /dev/null @@ -1,49 +0,0 @@ -cmake_minimum_required(VERSION 3.15) - -# ============================================================================== -# Setup the project -# ============================================================================== - -set (PROJECT_NAME bela-benchmark) - -project (${PROJECT_NAME} VERSION 0.0.1) - -# Sets the cpp language minimum -set(CMAKE_CXX_STANDARD 20) -set(CMAKE_CXX_STANDARD_REQUIRED True) - -# Make sure to install the anira library under /root/anira if using the precompiled version of anira or set ANIRA_WITH_INSTALL to ON when adding the anira library as a subdirectory -# Obviously, this benchmark requires the anira library to be build with the benchmark option enabled - -################################################################ -# Setting up anira -################################################################ - -# set(ANIRA_WITH_BENCHMARK ON) -# set(ANIRA_WITH_ONNXRUNTIME OFF) # Not yet supported -# set(ANIRA_WITH_TFLITE OFF) # Not yet supported -# # In case the libs from anira shall be installed -# # This only works when the anira library is a subdirectory of the project -# set(ANIRA_WITH_INSTALL ON) -# add_subdirectory(anira) # set this to the path of the anira library if its a submodule of your repository -# list(APPEND CMAKE_PREFIX_PATH "anira-0.1.0-bela") # Use this if you use the precompiled version of anira -# find_package(anira REQUIRED) - -################################################################ -# Adding the executable -################################################################ - -add_executable(${PROJECT_NAME}) - -target_sources(${PROJECT_NAME} PRIVATE - defineBelaBenchmark.cpp - main.cpp -) - -target_link_libraries(${PROJECT_NAME} anira::anira) - -################################################################ -# Installing the executable -################################################################ - -include(cmake/install.cmake) diff --git 
a/examples/embedded/bela/bela-benchmark/cmake/install.cmake b/examples/embedded/bela/bela-benchmark/cmake/install.cmake deleted file mode 100644 index c9c4559..0000000 --- a/examples/embedded/bela/bela-benchmark/cmake/install.cmake +++ /dev/null @@ -1,26 +0,0 @@ -# ============================================================================== -# Install the executable -# ============================================================================== - -# for CMAKE_INSTALL_INCLUDEDIR and others definition -include(GNUInstallDirs) - -# define the directory where the executable will be installed CMAKE_INSTALL_PREFIX -set(CMAKE_INSTALL_PREFIX "${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}-${PROJECT_VERSION}" CACHE PATH "Where the library will be installed to" FORCE) - -set(CUSTOM_RPATH "/root/anira/lib") -if (ANIRA_WITH_INSTALL) - list(APPEND CUSTOM_RPATH "$ORIGIN/../lib") -endif() -set_target_properties(${PROJECT_NAME} - PROPERTIES - INSTALL_RPATH "${CUSTOM_RPATH}" -) - -# install the target -install(TARGETS ${PROJECT_NAME} - # these get default values from GNUInstallDirs - RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR} - LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR} - BUNDLE DESTINATION ${CMAKE_INSTALL_BINDIR} -) diff --git a/examples/embedded/bela/bela-benchmark/defineBelaBenchmark.cpp b/examples/embedded/bela/bela-benchmark/defineBelaBenchmark.cpp deleted file mode 100644 index 3585b88..0000000 --- a/examples/embedded/bela/bela-benchmark/defineBelaBenchmark.cpp +++ /dev/null @@ -1,78 +0,0 @@ -#include -#include -#include -#include - -/* ============================================================ * - * ========================= Configs ========================== * - * ============================================================ */ - -#define NUM_ITERATIONS 50 -#define NUM_REPETITIONS 10 -#define BUFFER_SIZE 2048 -#define SAMPLE_RATE 44100 - -/* ============================================================ * - * ================== BENCHMARK DEFINITIONS =================== * - * ============================================================ */ - -typedef anira::benchmark::ProcessBlockFixture ProcessBlockFixture; - -anira::PrePostProcessor my_pp_processor; -anira::InferenceConfig my_inference_config( - "model.pt", - {{1, 1, 2048}}, - {{1, 1, 2048}}, - 21.53f, - 0, - true, - 0.f, - false, - 1 -); - -BENCHMARK_DEFINE_F(ProcessBlockFixture, BM_SIMPLE)(::benchmark::State& state) { - - // The buffer size return in get_buffer_size() is populated by state.range(0) param of the google benchmark - anira::HostAudioConfig host_config = {1, (size_t) get_buffer_size(), SAMPLE_RATE}; - anira::InferenceBackend inference_backend = anira::LIBTORCH; - - m_inference_handler = std::make_unique(my_pp_processor, my_inference_config); - m_inference_handler->prepare(host_config); - m_inference_handler->set_inference_backend(inference_backend); - - m_buffer = std::make_unique>(my_inference_config.m_num_audio_channels[anira::Input], host_config.m_host_buffer_size); - - initialize_repetition(my_inference_config, host_config, inference_backend); - - for (auto _ : state) { - push_random_samples_in_buffer(host_config); - - initialize_iteration(); - - auto start = std::chrono::high_resolution_clock::now(); - - m_inference_handler->process(m_buffer->get_array_of_write_pointers(), get_buffer_size()); - - // Using yield here is important to let the inference thread run - // Depending on the scheduler, yield does different things, a first-in-first-out realtime scheduler (SCHED_FIFO in Linux) would suspend the current thread and put it on 
the back of the queue of the same-priority threads that are ready to run (and if there are no other threads at the same priority, yield has no effect). - while (!buffer_processed()) { - std::this_thread::yield(); - } - - auto end = std::chrono::high_resolution_clock::now(); - - interation_step(start, end, state); - } - repetition_step(); -} - -// /* ============================================================ * -// * ================== BENCHMARK REGISTRATION ================== * -// * ============================================================ */ - -BENCHMARK_REGISTER_F(ProcessBlockFixture, BM_SIMPLE) -->Unit(benchmark::kMillisecond) -->Iterations(NUM_ITERATIONS)->Repetitions(NUM_REPETITIONS) -->Arg(BUFFER_SIZE) -->UseManualTime(); \ No newline at end of file diff --git a/examples/embedded/bela/bela-benchmark/main.cpp b/examples/embedded/bela/bela-benchmark/main.cpp deleted file mode 100644 index 1494e35..0000000 --- a/examples/embedded/bela/bela-benchmark/main.cpp +++ /dev/null @@ -1,16 +0,0 @@ -#include -#include - -int main(int argc, char** argv) { - // Initialize benchmark - benchmark::Initialize(&argc, argv); - - pthread_t self = pthread_self(); - - anira::HighPriorityThread::elevate_priority(self, true); - - // Run benchmark - benchmark::RunSpecifiedBenchmarks(); - - return 0; -} \ No newline at end of file diff --git a/examples/embedded/bela/bela-inference/CMakeLists.txt b/examples/embedded/bela/bela-inference/CMakeLists.txt deleted file mode 100644 index df61b83..0000000 --- a/examples/embedded/bela/bela-inference/CMakeLists.txt +++ /dev/null @@ -1,110 +0,0 @@ -cmake_minimum_required(VERSION 3.18) - -set(PROJECT_NAME "bela-inference") - -project(${PROJECT_NAME} VERSION 0.0.1) - -message(STATUS "Building project ${PROJECT_NAME}") - -# Sets the cpp language minimum -set(CMAKE_CXX_STANDARD 20) -set(CMAKE_CXX_STANDARD_REQUIRED True) - -# Make sure to install the anira library under /root/anira if using the precompiled version of anira or set ANIRA_WITH_INSTALL to ON when adding the anira library as a subdirectory - -################################################################ -# Setup required by Bela -################################################################ - -add_compile_options( - -march=armv7-a - -mtune=cortex-a8 - -mfloat-abi=hard - -mfpu=neon - -Wno-psabi -) - -add_compile_options( - -O3 - -g - -fPIC - -ftree-vectorize - -ffast-math -) - -add_compile_definitions(DXENOMAI_SKIN_posix) - -################################################################ - -find_library(COBALT_LIB REQUIRED - NAMES cobalt libcobalt - HINTS "${CMAKE_SYSROOT}/usr/xenomai/lib" -) - -find_library(NEON_LIB REQUIRED - NAMES NE10 libNE10 - HINTS "${CMAKE_SYSROOT}/usr/lib" -) - -find_library(MATHNEON_LIB REQUIRED - NAMES mathneon libmathneon - HINTS "${CMAKE_SYSROOT}/usr/lib" -) - -################################################################ -# Setting up anira -################################################################ - -# set(ANIRA_WITH_ONNXRUNTIME OFF) # Not yet supported -# set(ANIRA_WITH_TFLITE OFF) # Not yet supported -# # In case the libs from anira shall be installed -# # This only works when the anira library is a subdirectory of the project -# set(ANIRA_WITH_INSTALL ON) -# add_subdirectory(anira) # set this to the path of the anira library if its a submodule of your repository -# list(APPEND CMAKE_PREFIX_PATH "anira-0.1.0-bela") # Use this if you use the precompiled version of anira -# find_package(anira REQUIRED) - 
-################################################################ -# Adding the executable -################################################################ - -set(EXE_NAME ${PROJECT_NAME}) - -file(GLOB SRC_FILES *.cpp) - -add_executable(${EXE_NAME} ${SRC_FILES}) - -################################################################ -# Adding include directories from cross-compilation toolchain -################################################################ - -target_include_directories( - ${EXE_NAME} PRIVATE ${BELA_ROOT} ${BELA_ROOT}/include -) - -################################################################ -# Linking the required libraries -################################################################ - -target_link_libraries( - ${EXE_NAME} - PRIVATE - ${BELA_ROOT}/lib/libbelafull.so - anira::anira - ${COBALT_LIB} - ${NEON_LIB} - ${MATHNEON_LIB} - dl - prussdrv - asound - atomic - sndfile - pthread - rt -) - -################################################################ -# Installing the executable -################################################################ - -include(cmake/install.cmake) diff --git a/examples/embedded/bela/bela-inference/cmake/install.cmake b/examples/embedded/bela/bela-inference/cmake/install.cmake deleted file mode 100644 index 62a8595..0000000 --- a/examples/embedded/bela/bela-inference/cmake/install.cmake +++ /dev/null @@ -1,28 +0,0 @@ -# ============================================================================== -# Install the executable -# ============================================================================== - -# for CMAKE_INSTALL_INCLUDEDIR and others definition -include(GNUInstallDirs) - -# define the directory where the executable will be installed CMAKE_INSTALL_PREFIX -set(CMAKE_INSTALL_PREFIX "${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}-${PROJECT_VERSION}" CACHE PATH "Where the library will be installed to" FORCE) - -if (ANIRA_WITH_INSTALL) - list(APPEND CUSTOM_RPATH "$ORIGIN/../lib") -else() - list(APPEND CUSTOM_RPATH "/root/anira/lib") -endif() - -set_target_properties(${PROJECT_NAME} - PROPERTIES - INSTALL_RPATH "${CUSTOM_RPATH}" -) - -# install the target -install(TARGETS ${PROJECT_NAME} - # these get default values from GNUInstallDirs - RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR} - LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR} - BUNDLE DESTINATION ${CMAKE_INSTALL_BINDIR} -) diff --git a/examples/embedded/bela/bela-inference/main.cpp b/examples/embedded/bela/bela-inference/main.cpp deleted file mode 100644 index 135df7f..0000000 --- a/examples/embedded/bela/bela-inference/main.cpp +++ /dev/null @@ -1,114 +0,0 @@ -/* - * default_main.cpp - * - * Created on: Oct 24, 2014 - * Author: parallels - */ -#include -#include -#include -#include -#include -#include -#include -#include - -using namespace std; - -// Handle Ctrl-C by requesting that the audio rendering stop -void interrupt_handler(int var) -{ - Bela_requestStop(); -} - -// Print usage information -void usage(const char * processName) -{ - cerr << "Usage: " << processName << " [options]" << endl; - - Bela_usage(); - - cerr << " --help [-h]: Print this menu\n"; -} - -int main(int argc, char *argv[]) -{ - BelaInitSettings* settings = Bela_InitSettings_alloc(); // Standard audio settings - - struct option customOptions[] = - { - {"help", 0, NULL, 'h'}, - {NULL, 0, NULL, 0} - }; - - // Set default settings - Bela_defaultSettings(settings); - settings->setup = setup; - settings->render = render; - settings->cleanup = cleanup; - if(argc > 0 && argv[0]) - { - char* nameWithSlash = 
strrchr(argv[0], '/'); - settings->projectName = nameWithSlash ? nameWithSlash + 1 : argv[0]; - } - - while (1) { - int c = Bela_getopt_long(argc, argv, "h", customOptions, settings); - if (c < 0) - { - break; - } - int ret = -1; - switch (c) { - case 'h': - usage(basename(argv[0])); - ret = 0; - break; - default: - usage(basename(argv[0])); - ret = 1; - break; - } - if(ret >= 0) - { - Bela_InitSettings_free(settings); - return ret; - } - } - - // Initialise the PRU audio device - if(Bela_initAudio(settings, 0) != 0) { - Bela_InitSettings_free(settings); - fprintf(stderr,"Error: unable to initialise audio\n"); - return 1; - } - Bela_InitSettings_free(settings); - - // Set up interrupt handler to catch Control-C and SIGTERM - signal(SIGINT, interrupt_handler); - signal(SIGTERM, interrupt_handler); - - // Start the audio device running - if(Bela_startAudio()) { - fprintf(stderr,"Error: unable to start real-time audio\n"); - // Stop the audio device - Bela_stopAudio(); - // Clean up any resources allocated for audio - Bela_cleanupAudio(); - return 1; - } - - // Run until told to stop - while(!Bela_stopRequested()) { - usleep(100000); - } - - // Stop the audio device - Bela_stopAudio(); - - // Clean up any resources allocated for audio - Bela_cleanupAudio(); - - // All done! - return 0; -} diff --git a/examples/embedded/bela/bela-inference/render.cpp b/examples/embedded/bela/bela-inference/render.cpp deleted file mode 100644 index 21c38dd..0000000 --- a/examples/embedded/bela/bela-inference/render.cpp +++ /dev/null @@ -1,106 +0,0 @@ -/* - ____ _____ _ _ -| __ )| ____| | / \ -| _ \| _| | | / _ \ -| |_) | |___| |___ / ___ \ -|____/|_____|_____/_/ \_\ - -http://bela.io - -Anira x Bela Example -*/ - -#include -#include -#include -#include "../SimpleGainConfig.h" - -std::string g_filename = "ts9_test1_out_FP32.wav"; // Name of the sound file (in project folder) -std::vector g_sample_buffer; // Buffer that holds the sound file -int g_read_pointer = 0; // Position of the last frame we played - -anira::InferenceConfig g_inference_config = gain_config; -anira::PrePostProcessor g_pp_processor(g_inference_config); -anira::InferenceHandler g_inference_handler(g_pp_processor, g_inference_config); - -float** audio_data; - -bool setup(BelaContext *context, void *userData) -{ - rt_printf("Anira x Bela Example\n"); - rt_printf("Current buffer size: %d\n", context->audioFrames); - rt_printf("Current sample rate: %d\n", context->audioSampleRate); - - // Allocate memory for the audio data - audio_data = new float*[1]; - audio_data[0] = new float[context->audioFrames]; - - // Load the sample from storage into a buffer - g_sample_buffer = AudioFileUtilities::loadMono(g_filename); - - // Check if the load succeeded - if(g_sample_buffer.size() == 0) { - rt_printf("Error loading audio file '%s'\n", g_filename.c_str()); - return false; - } - - rt_printf("Loaded the audio file '%s' with %d frames (%.1f seconds)\n", - g_filename.c_str(), g_sample_buffer.size(), - g_sample_buffer.size() / context->audioSampleRate); - - // Prepare the inference handler and set the inference backend - g_inference_handler.prepare({(size_t)context->audioFrames, context->audioSampleRate}); - g_inference_handler.set_inference_backend(anira::ONNX); - - // Get the latency introduced by the inference handler (in samples) - int latency = g_inference_handler.get_latency(); - - // This model takes a gain value as additional input parameter - g_pp_processor.set_input(0.5, 1, 0); - - // Some printouts to check - 
if(g_inference_handler.get_inference_backend() == anira::LIBTORCH) { - rt_printf("Using LibTorch backend\n"); - } else if(g_inference_handler.get_inference_backend() == anira::ONNX) { - rt_printf("Using ONNXRuntime backend\n"); - } else if(g_inference_handler.get_inference_backend() == anira::TFLITE) { - rt_printf("Using TFLite backend\n"); - } else if(g_inference_handler.get_inference_backend() == anira::CUSTOM) { - rt_printf("Using custom backend\n"); - } else { - rt_printf("Backend not selected\n"); - } - rt_printf("Anira introduces a latency of %d samples\n", latency); - - return true; -} - -void render(BelaContext *context, void *userData) -{ - // Read the audio file and write it to the audio data buffer - for(unsigned int n = 0; n < context->audioFrames; n++) { - audio_data[0][n] = g_sample_buffer[g_read_pointer]; - // Increment and wrap the read pointer - g_read_pointer++; - if(g_read_pointer >= g_sample_buffer.size()) { - g_read_pointer = 0; - } - } - - // Process the audio data through the model - g_inference_handler.process(audio_data, context->audioFrames); - - // Write the processed audio data to the audio output - for(unsigned int channel = 0; channel < context->audioInChannels; channel++) { - for(unsigned int n = 0; n < context->audioFrames; n++) { - // Write the sample to every audio output channel - audioWrite(context, n, channel, audio_data[0][n]); - } - } -} - -void cleanup(BelaContext *context, void *userData) -{ - delete[] audio_data[0]; - delete[] audio_data; -} diff --git a/examples/desktop/juce-audio-plugin/CMakeLists.txt b/examples/juce-audio-plugin/CMakeLists.txt similarity index 99% rename from examples/desktop/juce-audio-plugin/CMakeLists.txt rename to examples/juce-audio-plugin/CMakeLists.txt index 9537ae4..005b5cd 100644 --- a/examples/desktop/juce-audio-plugin/CMakeLists.txt +++ b/examples/juce-audio-plugin/CMakeLists.txt @@ -132,7 +132,7 @@ if (MSVC) ) endforeach() - set(JUCE_MANIFEST_GENERATOR_PATH "${CMAKE_BINARY_DIR}/examples/desktop/juce-audio-plugin/${CMAKE_BUILD_TYPE}") + set(JUCE_MANIFEST_GENERATOR_PATH "${CMAKE_BINARY_DIR}/examples/juce-audio-plugin/${CMAKE_BUILD_TYPE}") add_custom_command(TARGET ${TARGET_NAME}_VST3 PRE_BUILD diff --git a/examples/desktop/juce-audio-plugin/PluginParameters.cpp b/examples/juce-audio-plugin/PluginParameters.cpp similarity index 100% rename from examples/desktop/juce-audio-plugin/PluginParameters.cpp rename to examples/juce-audio-plugin/PluginParameters.cpp diff --git a/examples/desktop/juce-audio-plugin/PluginParameters.h b/examples/juce-audio-plugin/PluginParameters.h similarity index 100% rename from examples/desktop/juce-audio-plugin/PluginParameters.h rename to examples/juce-audio-plugin/PluginParameters.h diff --git a/examples/desktop/juce-audio-plugin/PluginProcessor.cpp b/examples/juce-audio-plugin/PluginProcessor.cpp similarity index 100% rename from examples/desktop/juce-audio-plugin/PluginProcessor.cpp rename to examples/juce-audio-plugin/PluginProcessor.cpp diff --git a/examples/desktop/juce-audio-plugin/PluginProcessor.h b/examples/juce-audio-plugin/PluginProcessor.h similarity index 100% rename from examples/desktop/juce-audio-plugin/PluginProcessor.h rename to examples/juce-audio-plugin/PluginProcessor.h diff --git a/examples/desktop/juce-audio-plugin/install.cmake b/examples/juce-audio-plugin/install.cmake similarity index 100% rename from examples/desktop/juce-audio-plugin/install.cmake rename to examples/juce-audio-plugin/install.cmake diff --git a/examples/desktop/minimal-inference/CMakeLists.txt 
b/examples/minimal-inference/CMakeLists.txt similarity index 100% rename from examples/desktop/minimal-inference/CMakeLists.txt rename to examples/minimal-inference/CMakeLists.txt diff --git a/examples/desktop/minimal-inference/libtorch/CMakeLists.txt b/examples/minimal-inference/libtorch/CMakeLists.txt similarity index 100% rename from examples/desktop/minimal-inference/libtorch/CMakeLists.txt rename to examples/minimal-inference/libtorch/CMakeLists.txt diff --git a/examples/desktop/minimal-inference/libtorch/minimal-libtorch.cpp b/examples/minimal-inference/libtorch/minimal-libtorch.cpp similarity index 100% rename from examples/desktop/minimal-inference/libtorch/minimal-libtorch.cpp rename to examples/minimal-inference/libtorch/minimal-libtorch.cpp diff --git a/examples/desktop/minimal-inference/onnxruntime/CMakeLists.txt b/examples/minimal-inference/onnxruntime/CMakeLists.txt similarity index 100% rename from examples/desktop/minimal-inference/onnxruntime/CMakeLists.txt rename to examples/minimal-inference/onnxruntime/CMakeLists.txt diff --git a/examples/desktop/minimal-inference/onnxruntime/minimal-onnxruntime.cpp b/examples/minimal-inference/onnxruntime/minimal-onnxruntime.cpp similarity index 100% rename from examples/desktop/minimal-inference/onnxruntime/minimal-onnxruntime.cpp rename to examples/minimal-inference/onnxruntime/minimal-onnxruntime.cpp diff --git a/examples/desktop/minimal-inference/tensorflow-lite/CMakeLists.txt b/examples/minimal-inference/tensorflow-lite/CMakeLists.txt similarity index 100% rename from examples/desktop/minimal-inference/tensorflow-lite/CMakeLists.txt rename to examples/minimal-inference/tensorflow-lite/CMakeLists.txt diff --git a/examples/desktop/minimal-inference/tensorflow-lite/minimal-tflite.cpp b/examples/minimal-inference/tensorflow-lite/minimal-tflite.cpp similarity index 100% rename from examples/desktop/minimal-inference/tensorflow-lite/minimal-tflite.cpp rename to examples/minimal-inference/tensorflow-lite/minimal-tflite.cpp
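
Taken together, the new ANIRA_OPERATING_SYSTEM detection in CMakeLists.txt and the renamed *_DIR_NAME variables in SetupLibTorch.cmake, SetupOnnxRuntime.cmake and SetupTensorflowLite.cmake make each backend resolve to a platform-specific folder under modules/. A minimal standalone sketch of how such a name is composed — not part of the patch, using the ONNX Runtime version pinned above and assuming it runs after project() so CMAKE_SYSTEM_PROCESSOR is populated:

    cmake_minimum_required(VERSION 3.15)
    project(dir-name-sketch NONE)  # placeholder project; project() populates CMAKE_SYSTEM_PROCESSOR

    # APPLE is checked before UNIX because macOS also sets UNIX.
    if(APPLE)
        set(ANIRA_OPERATING_SYSTEM "macOS")
    elseif(UNIX)
        set(ANIRA_OPERATING_SYSTEM "Linux")
    elseif(WIN32)
        set(ANIRA_OPERATING_SYSTEM "Windows")
    endif()

    set(LIBONNXRUNTIME_VERSION 1.19.2)
    set(ONNXRUNTIME_DIR_NAME "onnxruntime-${LIBONNXRUNTIME_VERSION}-${ANIRA_OPERATING_SYSTEM}-${CMAKE_SYSTEM_PROCESSOR}")
    # On a 64-bit Linux host this expands to "onnxruntime-1.19.2-Linux-x86_64",
    # which is the directory SetupOnnxRuntime.cmake now expects under modules/.
    message(STATUS "ONNXRUNTIME_DIR_NAME = ${ONNXRUNTIME_DIR_NAME}")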