diff --git a/paddlex/inference/models/base/predictor/base_predictor.py b/paddlex/inference/models/base/predictor/base_predictor.py index 139661fa2f..af88b3bf15 100644 --- a/paddlex/inference/models/base/predictor/base_predictor.py +++ b/paddlex/inference/models/base/predictor/base_predictor.py @@ -165,11 +165,11 @@ def __init__( else: self._use_static_model = True if self._use_static_model: - if not use_hpip: - self._pp_option = self._prepare_pp_option(pp_option, device) - else: + self._pp_option = self._prepare_pp_option(pp_option, device) + if use_hpip: require_hpip() self._hpi_config = self._prepare_hpi_config(hpi_config, device) + self.sync_threads() else: self._use_hpip = False self._use_static_model = False @@ -475,6 +475,15 @@ def _prepare_hpi_config( return hpi_config + def sync_threads(self): + if self._pp_option and self._pp_option.cpu_threads: + # If the user specified threads in the old system, + # force it into the new HPI system + if self._hpi_config.backend_config is None: + self._hpi_config.backend_config = {} + if "cpu_num_threads" not in self._hpi_config.backend_config: + self._hpi_config.backend_config["cpu_num_threads"] = self._pp_option.cpu_threads + # NOTE: kept as an instance method — it reads self._pp_option and mutates self._hpi_config. 
def _get_device_info(self, device): if device is None: diff --git a/paddlex/inference/utils/hpi.py b/paddlex/inference/utils/hpi.py index ec8e958654..4f28dfa649 100644 --- a/paddlex/inference/utils/hpi.py +++ b/paddlex/inference/utils/hpi.py @@ -16,6 +16,7 @@ import importlib.resources import importlib.util import json +import os import platform from collections import defaultdict from functools import lru_cache @@ -58,11 +59,11 @@ class HPIInfo(BaseModel): class OpenVINOConfig(BaseModel): - cpu_num_threads: int = 10 + cpu_num_threads: int = Field(default_factory=lambda: int(os.getenv("CPU_NUM_THREADS", 10))) class ONNXRuntimeConfig(BaseModel): - cpu_num_threads: int = 10 + cpu_num_threads: int = Field(default_factory=lambda: int(os.getenv("CPU_NUM_THREADS", 10))) class TensorRTConfig(BaseModel): diff --git a/paddlex/inference/utils/pp_option.py b/paddlex/inference/utils/pp_option.py index 366174c7c9..8ae4e4c663 100644 --- a/paddlex/inference/utils/pp_option.py +++ b/paddlex/inference/utils/pp_option.py @@ -134,7 +134,7 @@ def _get_default_config(self, model_name): "run_mode": get_default_run_mode(model_name, device_type), "device_type": device_type, "device_id": device_id, - "cpu_threads": 10, + "cpu_threads": int(os.getenv("CPU_NUM_THREADS", 10)), "delete_pass": [], "enable_new_ir": True if model_name not in NEWIR_BLOCKLIST else False, "enable_cinn": False,