From 77f9756974054f6670048197385847268b9d1206 Mon Sep 17 00:00:00 2001
From: jiqing-feng <107918818+jiqing-feng@users.noreply.github.com>
Date: Mon, 8 Jan 2024 17:51:19 +0800
Subject: [PATCH] Add ipex inference llama test (#503)

* add llama and bloom in ipex inference tests

* only add llama tests
---
 tests/ipex/test_inference.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/tests/ipex/test_inference.py b/tests/ipex/test_inference.py
index 6889e76af2..ef732ea9bd 100644
--- a/tests/ipex/test_inference.py
+++ b/tests/ipex/test_inference.py
@@ -41,6 +41,7 @@
     "gpt_neo": "hf-internal-testing/tiny-random-GPTNeoModel",
     "gpt_neox": "hf-internal-testing/tiny-random-GPTNeoXForCausalLM",
     "gpt_bigcode": "hf-internal-testing/tiny-random-GPTBigCodeModel",
+    "llama": "fxmarty/tiny-llama-fast-tokenizer",
 }

 _CLASSIFICATION_TASK_TO_AUTOMODELS = {
@@ -56,7 +57,7 @@ class IPEXIntegrationTest(unittest.TestCase):
         "roberta",
     )

-    TEXT_GENERATION_SUPPORTED_ARCHITECTURES = ("gptj", "gpt2", "gpt_neo", "gpt_bigcode")
+    TEXT_GENERATION_SUPPORTED_ARCHITECTURES = ("gptj", "gpt2", "gpt_neo", "gpt_bigcode", "llama")

     QA_SUPPORTED_ARCHITECTURES = (
         "bert",
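
For reference, the sketch below shows roughly what the added coverage exercises: loading the tiny llama checkpoint registered in the diff above and running greedy text generation through a plain transformers pipeline. This is only a hedged illustration, not the repository's test code; the actual IPEX tests additionally route inference through optimum-intel's IPEX path, which is not shown here. The model ID comes from the patch; the prompt and generation settings are arbitrary.

# Illustrative sketch only (not the PR's test code): exercise the tiny llama
# checkpoint added in this patch with a plain transformers text-generation
# pipeline. A tiny-random model emits meaningless text; the point is that
# generation runs end to end.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

model_id = "fxmarty/tiny-llama-fast-tokenizer"  # checkpoint registered by the patch

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.float32)
model.eval()

generator = pipeline("text-generation", model=model, tokenizer=tokenizer)

# Deterministic (greedy) decoding, so an IPEX-accelerated run could be
# compared token-for-token against this eager baseline.
output = generator("This is a sample", max_new_tokens=8, do_sample=False)
print(output[0]["generated_text"])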