add phi unit-test (#10)
Ziha0-Zhang authored Jul 30, 2024
1 parent 2be49d9 · commit ab6b3b3
Showing 1 changed file with 20 additions and 0 deletions.
tests/test_moe_layer.py: 20 additions & 0 deletions
@@ -4,6 +4,7 @@
 import torch
 from transformers.models.llama.modeling_llama import LlamaConfig, LlamaMLP
 from transformers.models.phi3.modeling_phi3 import Phi3Config, Phi3MLP
+from transformers.models.phi.modeling_phi import PhiConfig, PhiMLP
 
 from mixlora.model import LoraLinear, MixLoraConfig, MixLoraSparseMoe
 
@@ -72,6 +73,25 @@ def test_llama_forward(self):
                 input = torch.zeros(shape)
                 output: torch.Tensor = moe_layer(input)
                 self.assertEqual(output.shape, shape)
+
+    def test_phi_forward(self):
+        mlp_layer = PhiMLP(
+            PhiConfig(
+                vocab_size=128,
+                hidden_size=hidden_size,
+                intermediate_size=hidden_size * 2,
+                num_hidden_layers=8,
+                num_attention_heads=2,
+            )
+        )
+        moe_layer = dummy_moe_layer(
+            "phi", mlp_layer, hidden_size, ["fc1", "fc2"]
+        )
+        for shape in dummy_test_shapes(hidden_size):
+            with self.subTest(f"test for shape = {shape}"):
+                input = torch.zeros(shape)
+                output: torch.Tensor = moe_layer(input)
+                self.assertEqual(output.shape, shape)
 
     def test_phi3_forward(self):
         mlp_layer = Phi3MLP(
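
For context, the new test follows the same pattern as the existing llama and phi3 cases: build a PhiMLP from a small PhiConfig, wrap it in a dummy MoE layer over the fc1/fc2 projections, and assert that the layer preserves the input tensor's shape. A minimal sketch for running the module's tests with Python's standard unittest runner, assuming the repository root is on the import path and the tests directory is importable as a package (the module path used here is an assumption):

    import unittest

    # Load and run every test in tests/test_moe_layer.py; the new
    # test_phi_forward runs alongside test_llama_forward and
    # test_phi3_forward, each checking that the MoE layer's output
    # shape matches its input shape for every dummy test shape.
    suite = unittest.defaultTestLoader.loadTestsFromName("tests.test_moe_layer")
    unittest.TextTestRunner(verbosity=2).run(suite)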
