Skip to content

Commit

Permalink
Apply suggestions from code review
Browse files Browse the repository at this point in the history
Co-authored-by: Ella Charlaix <80481427+echarlaix@users.noreply.github.com>
  • Loading branch information
faaany and echarlaix authored Jun 6, 2024
1 parent 1ef8d56 commit 5f5d205
Show file tree
Hide file tree
Showing 2 changed files with 1 addition and 9 deletions.
2 changes: 1 addition & 1 deletion optimum/intel/utils/modeling_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -184,7 +184,7 @@ def recursive_to_device(value, device):
return value


def setattr_from_module(new_module, module):
def _setattr_from_module(new_module, module):
for k, v in module.__dict__.items():
setattr(new_module, k, v)
for k, v in module.__class__.__dict__.items():
Expand Down
8 changes: 0 additions & 8 deletions tests/ipex/test_modeling.py
Original file line number Diff line number Diff line change
Expand Up @@ -242,10 +242,6 @@ def test_pipeline(self, model_arch):

    # The highly optimized llama model does not support assisted decoding for now.
@parameterized.expand(SUPPORTED_ARCHITECTURES)
@unittest.skipIf(
is_ipex_version(">=", "2.3.0"),
reason="CPU IPEXModel does not support assisted decoding when ipex version >= 2.3.0",
)
def test_assisted_decoding(self, model_arch):
if model_arch == "llama2":
return
Expand Down Expand Up @@ -301,10 +297,6 @@ def test_ipex_patching_beam_search(self, test_name, model_arch, use_cache):
self.assertIsInstance(outputs, torch.Tensor)
self.assertTrue(torch.equal(outputs, transformers_outputs))

@unittest.skipIf(
is_ipex_version(">=", "2.3.0"),
reason="CPU IPEXModel only supports with past_key_values for ipex version >= 2.3.0",
)
def test_compare_with_and_without_past_key_values(self):
model_id = "echarlaix/tiny-random-gpt2-torchscript"
tokenizer = AutoTokenizer.from_pretrained(model_id)
Expand Down

0 comments on commit 5f5d205

Please sign in to comment.