Correct path with stateless models
eshiryae committed Jan 15, 2025
1 parent de37585 commit 123e1c4
Showing 1 changed file with 13 additions and 14 deletions.
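The gist of the fix, illustrated below with a made-up model cache directory; only the "_with_past" suffix logic and the new return value come from the diff, the concrete values are hypothetical.

# Hypothetical illustration of the corrected cache path (values are made up).
import pathlib

stateful = False                       # the static tests export stateless (with-past) models
path = pathlib.Path("whisper-tiny")    # per-model cache directory taken from model_descr

# New behaviour in load_and_save_whisper_model: a stateless export is cached
# under a separate "_with_past" directory instead of the stateful one ...
if not stateful:
    path = pathlib.Path(f"{path}_with_past")   # -> "whisper-tiny_with_past"

# ... and the helper now returns (model_id, path), so each test loads the
# pipeline from the corrected location rather than from the raw model_descr path.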
27 changes: 13 additions & 14 deletions tests/python_tests/test_whisper_pipeline_static.py
@@ -8,17 +8,20 @@
 import openvino_tokenizers
 import openvino
 import pytest
+import pathlib
 
 # This test suite is designed specifically to validate the functionality
 # and robustness of the WhisperStaticPipeline on NPUW:CPU.
 config = {"NPU_USE_NPUW" : "YES",
           "NPUW_DEVICES" : "CPU",
           "NPUW_ONLINE_PIPELINE" : "NONE"}
 
-def load_and_save_whisper_model(params, **tokenizer_kwargs):
+def load_and_save_whisper_model(params, stateful=False, **tokenizer_kwargs):
     model_id, path = params
 
     processor = WhisperProcessor.from_pretrained(model_id, trust_remote_code=True)
+    if not stateful:
+        path = pathlib.Path(f"{path}_with_past")
 
     if not (path / "openvino_encoder_model.xml").exists():
         tokenizer = AutoTokenizer.from_pretrained(model_id, trust_remote_code=True)
@@ -39,15 +42,17 @@ def load_and_save_whisper_model(params, **tokenizer_kwargs):
         model_id,
         export=True,
         trust_remote_code=True,
+        stateful=stateful,
         compile=False,
         device="CPU",
         load_in_8bit=False,
-        stateful=False,
     )
     opt_model.generation_config.save_pretrained(path)
     opt_model.config.save_pretrained(path)
     opt_model.save_pretrained(path)
     processor.save_pretrained(path)
 
+    return model_id, path
+
 def get_results_cpu_npu(model_path, audio_sample, **config_kwargs):
     cpu_pipe = ov_genai.WhisperPipeline(model_path, "CPU")
@@ -72,8 +77,7 @@ def compare_results_with_assert(expected, actual_out):
 @pytest.mark.parametrize("test_sample", get_samples_from_dataset(language="en", length=1))
 @pytest.mark.precommit
 def test_static_whisper_generation_compare_with_cpu(model_descr, test_sample):
-    model_id, model_path = model_descr
-    load_and_save_whisper_model(model_descr)
+    model_id, model_path = load_and_save_whisper_model(model_descr)
 
     expected, actual_out = get_results_cpu_npu(model_path, test_sample)
 
@@ -89,8 +93,7 @@ def test_static_whisper_generation_compare_with_cpu(model_descr, test_sample):
 ],)
 @pytest.mark.precommit
 def test_static_whisper_autodetect(model_descr, test_sample):
-    model_id, model_path = model_descr
-    load_and_save_whisper_model(model_descr)
+    model_id, model_path = load_and_save_whisper_model(model_descr)
 
     expected, actual_out = get_results_cpu_npu(model_path, test_sample)
 
@@ -103,8 +106,7 @@ def test_static_whisper_autodetect(model_descr, test_sample):
 )
 @pytest.mark.precommit
 def test_static_whisper_language_de(model_descr, test_sample):
-    model_id, model_path = model_descr
-    load_and_save_whisper_model(model_descr)
+    model_id, model_path = load_and_save_whisper_model(model_descr)
 
     expected, actual_out = get_results_cpu_npu(model_path, test_sample, max_new_tokens=30, language="<|de|>")
 
@@ -117,8 +119,7 @@ def test_static_whisper_language_de(model_descr, test_sample):
 )
 @pytest.mark.precommit
 def test_static_whisper_language_fr(model_descr, test_sample):
-    model_id, model_path = model_descr
-    load_and_save_whisper_model(model_descr)
+    model_id, model_path = load_and_save_whisper_model(model_descr)
 
     expected, actual_out = get_results_cpu_npu(model_path, test_sample, max_new_tokens=30, language="<|fr|>")
 
@@ -131,8 +132,7 @@ def test_static_whisper_language_fr(model_descr, test_sample):
 )
 @pytest.mark.precommit
 def test_static_whisper_language_ru(model_descr, test_sample):
-    model_id, model_path = model_descr
-    load_and_save_whisper_model(model_descr)
+    model_id, model_path = load_and_save_whisper_model(model_descr)
 
     expected, actual_out = get_results_cpu_npu(model_path, test_sample, max_new_tokens=30, language="<|ru|>")
 
@@ -143,8 +143,7 @@ def test_static_whisper_language_ru(model_descr, test_sample):
 @pytest.mark.parametrize("test_sample", get_samples_from_dataset(language="en", length=1, long_form=True))
 @pytest.mark.precommit
 def test_static_whisper_generation_long(model_descr, test_sample):
-    model_id, model_path = model_descr
-    load_and_save_whisper_model(model_descr)
+    model_id, model_path = load_and_save_whisper_model(model_descr)
 
     expected, actual_out = get_results_cpu_npu(model_path, test_sample)
 
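For context, a minimal sketch of how the pieces above fit together. The body of get_results_cpu_npu is collapsed in the diff beyond its first line, so everything past the cpu_pipe line here is an assumption about how the CPU reference and the NPUW-on-CPU pipeline are compared, not code from this commit.

# Hypothetical reconstruction of the collapsed helper (assumptions marked below).
import openvino_genai as ov_genai

# NPUW properties from the top of the test file: expose the model through the
# "NPU" device while NPUW executes every partition on CPU.
config = {"NPU_USE_NPUW" : "YES",
          "NPUW_DEVICES" : "CPU",
          "NPUW_ONLINE_PIPELINE" : "NONE"}

def get_results_cpu_npu(model_path, audio_sample, **config_kwargs):
    # CPU reference pipeline (this line is visible in the diff above).
    cpu_pipe = ov_genai.WhisperPipeline(model_path, "CPU")
    expected = cpu_pipe.generate(audio_sample, **config_kwargs)

    # Static pipeline on "NPU", redirected to CPU through the NPUW properties;
    # passing them as keyword properties here is an assumption.
    npu_pipe = ov_genai.WhisperPipeline(model_path, "NPU", **config)
    actual_out = npu_pipe.generate(audio_sample, **config_kwargs)

    return expected, actual_out

Each test then feeds the path returned by the reworked load_and_save_whisper_model into this comparison, as shown in the hunks above, so the stateless export cached under the "_with_past" suffix is the one actually loaded.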