diff --git a/samples/cpp/CMakeLists.txt b/samples/cpp/CMakeLists.txt
index 7a7748e6fe1ced..892b331728be53 100644
--- a/samples/cpp/CMakeLists.txt
+++ b/samples/cpp/CMakeLists.txt
@@ -201,7 +201,7 @@ macro(ov_add_sample)
     if(c_sample)
         set(ov_link_libraries openvino::runtime::c)
     else()
-        set(ov_link_libraries openvino::runtime)
+        set(ov_link_libraries)
     endif()
 
     set_target_properties(${SAMPLE_NAME} PROPERTIES FOLDER ${folder_name}
diff --git a/samples/cpp/hello_classification/main.cpp b/samples/cpp/hello_classification/main.cpp
index 73a06a7692b5a8..b5d130de0dc180 100644
--- a/samples/cpp/hello_classification/main.cpp
+++ b/samples/cpp/hello_classification/main.cpp
@@ -7,120 +7,75 @@
 #include
 #include
 #include
+#include <iostream>
+
+#ifdef _WIN32
+#include <windows.h>
+#else
+#include <dlfcn.h>
+#endif
 
 // clang-format off
-#include "openvino/openvino.hpp"
-#include "samples/args_helper.hpp"
-#include "samples/common.hpp"
-#include "samples/classification_results.h"
-#include "samples/slog.hpp"
-#include "format_reader_ptr.h"
 // clang-format on
 
 /**
  * @brief Main with support Unicode paths, wide strings
  */
-int tmain(int argc, tchar* argv[]) {
-    try {
-        // -------- Set OpenVINO log message capturing callback --------
-        const std::function<void(std::string_view)> log_callback{[](std::string_view msg) {
-            slog::info << msg;
-        }};
-        ov::util::set_log_callback(log_callback);
-
-        // -------- Get OpenVINO runtime version --------
-        slog::info << ov::get_openvino_version() << slog::endl;
-
-        // -------- Parsing and validation of input arguments --------
-        if (argc != 4) {
-            slog::info << "Usage : " << TSTRING2STRING(argv[0]) << " <path_to_model> <path_to_image> <device_name>"
-                       << slog::endl;
-            return EXIT_FAILURE;
-        }
-
-        const std::string args = TSTRING2STRING(argv[0]);
-        const std::string model_path = TSTRING2STRING(argv[1]);
-        const std::string image_path = TSTRING2STRING(argv[2]);
-        const std::string device_name = TSTRING2STRING(argv[3]);
-
-        // -------- Step 1. Initialize OpenVINO Runtime Core --------
-        ov::Core core;
-
-        // -------- Step 2. Read a model --------
-        slog::info << "Loading model files: " << model_path << slog::endl;
-        std::shared_ptr<ov::Model> model = core.read_model(model_path);
-        printInputAndOutputsInfo(*model);
-
-        OPENVINO_ASSERT(model->inputs().size() == 1, "Sample supports models with 1 input only");
-        OPENVINO_ASSERT(model->outputs().size() == 1, "Sample supports models with 1 output only");
-
-        // -------- Step 3. Set up input
-
-        // Read input image to a tensor and set it to an infer request
-        // without resize and layout conversions
-        FormatReader::ReaderPtr reader(image_path.c_str());
-        if (reader.get() == nullptr) {
-            std::stringstream ss;
-            ss << "Image " + image_path + " cannot be read!";
-            throw std::logic_error(ss.str());
-        }
-
-        ov::element::Type input_type = ov::element::u8;
-        ov::Shape input_shape = {1, reader->height(), reader->width(), 3};
-        std::shared_ptr<unsigned char> input_data = reader->getData();
-
-        // just wrap image data by ov::Tensor without allocating of new memory
-        ov::Tensor input_tensor = ov::Tensor(input_type, input_shape, input_data.get());
-
-        const ov::Layout tensor_layout{"NHWC"};
-
-        // -------- Step 4. Configure preprocessing --------
-
-        ov::preprocess::PrePostProcessor ppp(model);
-
-        // 1) Set input tensor information:
-        // - input() provides information about a single model input
-        // - reuse precision and shape from already available `input_tensor`
-        // - layout of data is 'NHWC'
-        ppp.input().tensor().set_shape(input_shape).set_element_type(input_type).set_layout(tensor_layout);
-        // 2) Adding explicit preprocessing steps:
-        // - convert layout to 'NCHW' (from 'NHWC' specified above at tensor layout)
-        // - apply linear resize from tensor spatial dims to model spatial dims
-        ppp.input().preprocess().resize(ov::preprocess::ResizeAlgorithm::RESIZE_LINEAR);
-        // 4) Suppose model has 'NCHW' layout for input
-        ppp.input().model().set_layout("NCHW");
-        // 5) Set output tensor information:
-        // - precision of tensor is supposed to be 'f32'
-        ppp.output().tensor().set_element_type(ov::element::f32);
-
-        // 6) Apply preprocessing modifying the original 'model'
-        model = ppp.build();
-
-        // -------- Step 5. Loading a model to the device --------
-        ov::CompiledModel compiled_model = core.compile_model(model, device_name);
-
-        // -------- Step 6. Create an infer request --------
-        ov::InferRequest infer_request = compiled_model.create_infer_request();
-        // -----------------------------------------------------------------------------------------------------
-
-        // -------- Step 7. Prepare input --------
-        infer_request.set_input_tensor(input_tensor);
-
-        // -------- Step 8. Do inference synchronously --------
-        infer_request.infer();
-
-        // -------- Step 9. Process output
-        const ov::Tensor& output_tensor = infer_request.get_output_tensor();
-
-        // Print classification results
-        ClassificationResult classification_result(output_tensor, {image_path});
-        classification_result.show();
-        // -----------------------------------------------------------------------------------------------------
-    } catch (const std::exception& ex) {
-        std::cerr << ex.what() << std::endl;
-        return EXIT_FAILURE;
+int main() {
+
+    std::cout << "Hello Classification Sample" << std::endl;
+
+#ifdef _WIN32
+    // Path to the shared library
+    const char* lib_path = "openvino.dll";
+
+    // Load the shared library
+    std::cout << "Loading shared library: " << lib_path << std::endl;
+    HMODULE handle = LoadLibraryA(lib_path);
+
+    if (!handle) {
+        std::cerr << "Error loading library: " << GetLastError() << std::endl;
+        return 1;
     }
-
-    return EXIT_SUCCESS;
+
+    std::cout << "Library loaded successfully!" << std::endl;
+
+    // Unload the shared library
+    std::cout << "Unloading shared library..." << std::endl;
+    if (!FreeLibrary(handle)) {
+        std::cerr << "Error unloading library: " << GetLastError() << std::endl;
+        return 1;
+    }
+
+    std::cout << "Library unloaded successfully!" << std::endl;
+#else
+    // Path to the shared library
+    const char* lib_path = "libopenvino.so";
+
+    // Load the shared library
+    std::cout << "Loading shared library: " << lib_path << std::endl;
+    void* handle = dlopen(lib_path, RTLD_LAZY);
+
+    if (!handle) {
+        std::cerr << "Error loading library: " << dlerror() << std::endl;
+        return 1;
+    }
+
+    std::cout << "Library loaded successfully!" << std::endl;
+
+    // Clear any existing error
+    dlerror();
+
+    // Unload the shared library
+    std::cout << "Unloading shared library..." << std::endl;
+    if (dlclose(handle) != 0) {
+        std::cerr << "Error unloading library: " << dlerror() << std::endl;
+        return 1;
+    }
+
+    std::cout << "Library unloaded successfully!" << std::endl;
+#endif
+
+    return 0;
 }
diff --git a/src/cmake/openvino.cmake b/src/cmake/openvino.cmake
index ef9be523e6e794..5f44ec720a4337 100644
--- a/src/cmake/openvino.cmake
+++ b/src/cmake/openvino.cmake
@@ -52,6 +52,7 @@ target_include_directories(${TARGET_NAME} INTERFACE
 target_link_libraries(${TARGET_NAME} PRIVATE
     openvino::reference
     openvino::shape_inference
+    openvino::shutdown
     openvino::pugixml
     ${CMAKE_DL_LIBS}
     Threads::Threads
diff --git a/src/common/CMakeLists.txt b/src/common/CMakeLists.txt
index 6cf34afe6e0942..15e2e0220836cf 100644
--- a/src/common/CMakeLists.txt
+++ b/src/common/CMakeLists.txt
@@ -3,8 +3,10 @@
 #
 
 add_subdirectory(itt)
+add_subdirectory(shutdown)
 add_subdirectory(conditional_compilation)
 add_subdirectory(util)
+add_subdirectory(test_lib)
 
 if(ENABLE_INTEL_CPU)
     add_subdirectory(snippets)
diff --git a/src/common/itt/CMakeLists.txt b/src/common/itt/CMakeLists.txt
index 83bd6b72276562..a4310e03010b1e 100644
--- a/src/common/itt/CMakeLists.txt
+++ b/src/common/itt/CMakeLists.txt
@@ -42,6 +42,9 @@ if(NOT ENABLE_PROFILING_ITT STREQUAL "OFF")
     endif()
 endif()
 
+target_include_directories(${TARGET_NAME} PRIVATE
+    $)
+
 target_include_directories(${TARGET_NAME} PUBLIC
     $)
diff --git a/src/common/itt/src/itt.cpp b/src/common/itt/src/itt.cpp
index afc311d6b08eba..e9bdf750d3ded5 100644
--- a/src/common/itt/src/itt.cpp
+++ b/src/common/itt/src/itt.cpp
@@ -8,6 +8,9 @@
 #include
 #include
 #include
+#include <iostream>
+
+#include "shutdown.hpp"
 
 #ifdef ENABLE_PROFILING_ITT
 #    include
@@ -133,3 +136,12 @@ void regionEnd(domain_t) {}
 }  // namespace internal
 }  // namespace itt
 }  // namespace openvino
+
+
+static void shutdown_itt_resources() {
+    std::cout << "Shutdown ITT: Releasing ITT resources..." << std::endl;
+}
+
+extern "C" void itt_shutdown() {
+    shutdown_itt_resources();
+}
diff --git a/src/common/shutdown/CMakeLists.txt b/src/common/shutdown/CMakeLists.txt
new file mode 100644
index 00000000000000..9d1140897a0b9a
--- /dev/null
+++ b/src/common/shutdown/CMakeLists.txt
@@ -0,0 +1,21 @@
+# Copyright (C) 2026 Intel Corporation
+# SPDX-License-Identifier: Apache-2.0
+#
+
+set(TARGET_NAME openvino_shutdown)
+
+# Main shutdown library without weak symbols
+add_library(${TARGET_NAME} OBJECT shutdown.cpp)
+add_library(openvino::shutdown ALIAS ${TARGET_NAME})
+target_include_directories(${TARGET_NAME} PUBLIC ${CMAKE_CURRENT_SOURCE_DIR})
+set_target_properties(${TARGET_NAME} PROPERTIES EXPORT_NAME openvino_shutdown)
+
+
+# Separate library for weak default implementations - must be linked AFTER itt
+set(WEAK_TARGET_NAME openvino_shutdown_weak)
+add_library(${WEAK_TARGET_NAME} STATIC unload_weak_defs.cpp)
+add_library(openvino::shutdown_weak ALIAS ${WEAK_TARGET_NAME})
+target_include_directories(${WEAK_TARGET_NAME} PUBLIC ${CMAKE_CURRENT_SOURCE_DIR})
+set_target_properties(${WEAK_TARGET_NAME} PROPERTIES EXPORT_NAME openvino_shutdown_weak)
+
+target_link_libraries(${TARGET_NAME} PRIVATE ${WEAK_TARGET_NAME})
diff --git a/src/common/shutdown/shutdown.cpp b/src/common/shutdown/shutdown.cpp
new file mode 100644
index 00000000000000..c0108080273708
--- /dev/null
+++ b/src/common/shutdown/shutdown.cpp
@@ -0,0 +1,43 @@
+// Copyright (C) 2026 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+#include "shutdown.hpp"
+
+#include
+#include
+#include
+
+namespace {
+void call_on_unload() {
+    itt_shutdown();
+    test_lib_shutdown();
+}
+}  // namespace
+
+
+
+#if defined(_WIN32) && !defined(__MINGW32__) && !defined(__MINGW64__)
+#    include <windows.h>
+BOOL WINAPI DllMain(HINSTANCE hinstDLL,  // handle to DLL module
+                    DWORD fdwReason,     // reason for calling function
+                    LPVOID lpReserved)   // reserved
+{
+    // Perform actions based on the reason for calling.
+    switch (fdwReason) {
+    case DLL_PROCESS_ATTACH:
+    case DLL_THREAD_ATTACH:
+    case DLL_THREAD_DETACH:
+        break;
+
+    case DLL_PROCESS_DETACH:
+        call_on_unload();
+        break;
+    }
+    return TRUE;  // Successful DLL_PROCESS_ATTACH.
+}
+#elif defined(__linux__) || defined(__APPLE__) || defined(__EMSCRIPTEN__)
+extern "C" __attribute__((destructor)) void library_unload();
+void library_unload() {
+    call_on_unload();
+}
+#endif
diff --git a/src/common/shutdown/shutdown.hpp b/src/common/shutdown/shutdown.hpp
new file mode 100644
index 00000000000000..c3bcde932cfb13
--- /dev/null
+++ b/src/common/shutdown/shutdown.hpp
@@ -0,0 +1,27 @@
+// Copyright (C) 2026 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+
+#pragma once
+
+namespace init_registry {
+}  // namespace init_registry
+
+#if defined(_MSC_VER)
+
+extern "C" void itt_shutdown();
+#pragma comment(linker, "/alternatename:itt_shutdown=itt_shutdown_default")
+
+extern "C" void test_lib_shutdown();
+#pragma comment(linker, "/alternatename:test_lib_shutdown=test_lib_shutdown_default")
+
+#elif defined(__GNUC__) || defined(__clang__)
+extern "C" void itt_shutdown();
+extern "C" void test_lib_shutdown();
+
+
+#else
+#    error "Compiler not supported"
+#endif
+
+
diff --git a/src/common/shutdown/unload_weak_defs.cpp b/src/common/shutdown/unload_weak_defs.cpp
new file mode 100644
index 00000000000000..a9f5d6f311bbaa
--- /dev/null
+++ b/src/common/shutdown/unload_weak_defs.cpp
@@ -0,0 +1,34 @@
+// Copyright (C) 2026 Intel Corporation
+// SPDX-License-Identifier: Apache-2.0
+//
+#include "shutdown.hpp"
+
+#include <iostream>
+
+
+
+#if defined(_MSC_VER)
+
+extern "C" void itt_shutdown_default() {
+    std::cout << "Default itt_shutdown called: no ITT resources to release." << std::endl;
+}
+
+extern "C" void test_lib_shutdown_default() {
+    std::cout << "Default test_lib_shutdown called: no test library resources to release." << std::endl;
+}
+
+#elif defined(__GNUC__) || defined(__clang__)
+extern "C" void itt_shutdown() __attribute__((weak));
+
+extern "C" void itt_shutdown() {
+    std::cout << "Default itt_shutdown called: no ITT resources to release." << std::endl;
+}
+
+extern "C" void test_lib_shutdown() __attribute__((weak));
+
+extern "C" void test_lib_shutdown() {
+    std::cout << "Default test_lib_shutdown called: no test library resources to release." << std::endl;
+}
+#else
+#    error "Compiler not supported"
+#endif
diff --git a/src/common/test_lib/CMakeLists.txt b/src/common/test_lib/CMakeLists.txt
new file mode 100644
index 00000000000000..74f38bcc9eaf03
--- /dev/null
+++ b/src/common/test_lib/CMakeLists.txt
@@ -0,0 +1,17 @@
+# Copyright (C) 2018-2026 Intel Corporation
+# SPDX-License-Identifier: Apache-2.0
+#
+
+set(TARGET_NAME test_lib)
+
+add_library(${TARGET_NAME} STATIC test_lib.cpp)
+target_include_directories(${TARGET_NAME} PRIVATE
+    $)
+
+set_target_properties(${TARGET_NAME} PROPERTIES EXPORT_NAME test_lib)
+
+ov_install_static_lib(${TARGET_NAME} ${OV_CPACK_COMP_CORE})
+
+ov_developer_package_export_targets(TARGET test_lib
+    INSTALL_INCLUDE_DIRECTORIES "${CMAKE_CURRENT_SOURCE_DIR}/include/")
+
diff --git a/src/common/test_lib/test_lib.cpp b/src/common/test_lib/test_lib.cpp
new file mode 100644
index 00000000000000..a0e5e2a93bfc04
--- /dev/null
+++ b/src/common/test_lib/test_lib.cpp
@@ -0,0 +1,8 @@
+#include <iostream>
+
+
+#include "shutdown.hpp"
+
+extern "C" void test_lib_shutdown() {
+    std::cout << "Test library shutdown: Releasing test library resources..." << std::endl;
+}
diff --git a/src/core/shape_inference/CMakeLists.txt b/src/core/shape_inference/CMakeLists.txt
index 73835aa5b08fb6..3e6f193017f51b 100644
--- a/src/core/shape_inference/CMakeLists.txt
+++ b/src/core/shape_inference/CMakeLists.txt
@@ -26,6 +26,8 @@ target_include_directories(${TARGET_NAME} PUBLIC
     $
     $)
 
+#target_link_libraries(${TARGET_NAME} PRIVATE test_lib)
+
 ov_add_clang_format_target(${TARGET_NAME}_clang FOR_TARGETS ${TARGET_NAME})
 
 if(NOT BUILD_SHARED_LIBS)