diff --git a/optimum/intel/openvino/modeling_base.py b/optimum/intel/openvino/modeling_base.py
index 685518f6a5..32b6b02377 100644
--- a/optimum/intel/openvino/modeling_base.py
+++ b/optimum/intel/openvino/modeling_base.py
@@ -30,7 +30,7 @@
 
 from ...exporters.openvino import export, main_export
 from ..utils.import_utils import is_nncf_available, is_transformers_version
-from .utils import ONNX_WEIGHTS_NAME, OV_XML_FILE_NAME
+from .utils import ONNX_WEIGHTS_NAME, OV_XML_FILE_NAME, _print_compiled_model_properties
 
 
 if is_transformers_version("<", "4.25.0"):
@@ -350,6 +350,10 @@ def compile(self):
                 ov_config["CACHE_DIR"] = str(cache_dir)
                 logger.info(f"Setting OpenVINO CACHE_DIR to {str(cache_dir)}")
             self.request = core.compile_model(self.model, self._device, ov_config)
+            # OPENVINO_LOG_LEVEL can be found in https://docs.openvino.ai/2023.2/openvino_docs_OV_UG_supported_plugins_AUTO_debugging.html
+            if "OPENVINO_LOG_LEVEL" in os.environ and int(os.environ["OPENVINO_LOG_LEVEL"]) > 2:
+                logger.info(f"{self._device} SUPPORTED_PROPERTIES:")
+                _print_compiled_model_properties(self.request)
 
     def _reshape(
         self,
diff --git a/optimum/intel/openvino/modeling_diffusion.py b/optimum/intel/openvino/modeling_diffusion.py
index 12e8fc213c..8c1681982b 100644
--- a/optimum/intel/openvino/modeling_diffusion.py
+++ b/optimum/intel/openvino/modeling_diffusion.py
@@ -55,7 +55,7 @@
 from ...exporters.openvino import main_export
 from .loaders import OVTextualInversionLoaderMixin
 from .modeling_base import OVBaseModel
-from .utils import ONNX_WEIGHTS_NAME, OV_TO_NP_TYPE, OV_XML_FILE_NAME
+from .utils import ONNX_WEIGHTS_NAME, OV_TO_NP_TYPE, OV_XML_FILE_NAME, _print_compiled_model_properties
 
 
 core = Core()
@@ -544,6 +544,10 @@ def _compile(self):
         if self.request is None:
             logger.info(f"Compiling the {self._model_name} to {self.device} ...")
             self.request = core.compile_model(self.model, self.device, self.ov_config)
+            # OPENVINO_LOG_LEVEL can be found in https://docs.openvino.ai/2023.2/openvino_docs_OV_UG_supported_plugins_AUTO_debugging.html
+            if "OPENVINO_LOG_LEVEL" in os.environ and int(os.environ["OPENVINO_LOG_LEVEL"]) > 2:
+                logger.info(f"{self.device} SUPPORTED_PROPERTIES:")
+                _print_compiled_model_properties(self.request)
 
     @property
     def device(self):
diff --git a/optimum/intel/openvino/modeling_seq2seq.py b/optimum/intel/openvino/modeling_seq2seq.py
index d43dbf3427..9a7f913ab2 100644
--- a/optimum/intel/openvino/modeling_seq2seq.py
+++ b/optimum/intel/openvino/modeling_seq2seq.py
@@ -14,6 +14,7 @@
 
 import copy
 import logging
+import os
 from pathlib import Path
 from tempfile import gettempdir
 from typing import TYPE_CHECKING, Dict, Optional, Tuple, Union
@@ -37,6 +38,7 @@
 
 from ..utils.import_utils import is_transformers_version
 from .modeling_base_seq2seq import OVBaseModelForSeq2SeqLM
+from .utils import _print_compiled_model_properties
 
 
 if is_transformers_version("<", "4.25.0"):
@@ -472,6 +474,10 @@ def _compile(self):
         if self.request is None:
             logger.info(f"Compiling the encoder to {self._device} ...")
             self.request = core.compile_model(self.model, self._device, self.ov_config)
+            # OPENVINO_LOG_LEVEL can be found in https://docs.openvino.ai/2023.2/openvino_docs_OV_UG_supported_plugins_AUTO_debugging.html
+            if "OPENVINO_LOG_LEVEL" in os.environ and int(os.environ["OPENVINO_LOG_LEVEL"]) > 2:
+                logger.info(f"{self._device} SUPPORTED_PROPERTIES:")
+                _print_compiled_model_properties(self.request)
 
 
 class OVDecoder:
@@ -570,7 +576,12 @@ def __call__(self, *args, **kwargs):
     def _compile(self):
         if self.request is None:
             logger.info(f"Compiling the decoder to {self._device} ...")
-            self.request = core.compile_model(self.model, self._device, self.ov_config).create_infer_request()
+            compiled_model = core.compile_model(self.model, self._device, self.ov_config)
+            self.request = compiled_model.create_infer_request()
+            # OPENVINO_LOG_LEVEL can be found in https://docs.openvino.ai/2023.2/openvino_docs_OV_UG_supported_plugins_AUTO_debugging.html
+            if "OPENVINO_LOG_LEVEL" in os.environ and int(os.environ["OPENVINO_LOG_LEVEL"]) > 2:
+                logger.info(f"{self._device} SUPPORTED_PROPERTIES:")
+                _print_compiled_model_properties(compiled_model)
 
 
 @add_start_docstrings(
diff --git a/optimum/intel/openvino/utils.py b/optimum/intel/openvino/utils.py
index c05ba9e374..9548c750bf 100644
--- a/optimum/intel/openvino/utils.py
+++ b/optimum/intel/openvino/utils.py
@@ -14,15 +14,18 @@
 
 import json
+import logging
 import os
 from glob import glob
 
 import numpy as np
 from huggingface_hub import model_info
-from openvino.runtime import Type
+from openvino.runtime import Type, properties
 from transformers.onnx.utils import ParameterFormat, compute_serialized_parameters_size
 
 
+logger = logging.getLogger(__name__)
+
 OV_XML_FILE_NAME = "openvino_model.xml"
 OV_ENCODER_NAME = "openvino_encoder_model.xml"
 OV_DECODER_NAME = "openvino_decoder_model.xml"
@@ -123,3 +126,19 @@ def _is_timm_ov_dir(model_dir):
     if hf_hub_id and model_info(hf_hub_id).library_name == "timm":
         return True
     return False
+
+
+def _print_compiled_model_properties(compiled_model):
+    supported_properties = properties.supported_properties()
+    skip_keys = {"SUPPORTED_METRICS", "SUPPORTED_CONFIG_KEYS", supported_properties}
+    keys = set(compiled_model.get_property(supported_properties)) - skip_keys
+    for k in keys:
+        value = compiled_model.get_property(k)
+        if k == properties.device.properties():
+            for device_key in value.keys():
+                logger.info(f"  {device_key}:")
+                for k2, value2 in value.get(device_key).items():
+                    if k2 not in skip_keys:
+                        logger.info(f"    {k2}: {value2}")
+        else:
+            logger.info(f"  {k}: {value}")