
Commit 8e9dc49

use llama 3.1 for test

Signed-off-by: Sertac Ozercan <sozercan@gmail.com>

1 parent: ec787eb

File tree: 2 files changed, +7 −4 lines

  pkg/finetune/target_unsloth.py
  test/aikitfile-unsloth.yaml


pkg/finetune/target_unsloth.py (6 additions, 3 deletions)
@@ -1,5 +1,7 @@
 #!/usr/bin/env python3
 
+from unsloth import is_bfloat16_supported
+from transformers import TrainingArguments, DataCollatorForSeq2Seq
 from unsloth import FastLanguageModel
 import torch
 from trl import SFTTrainer
@@ -71,13 +73,14 @@ def formatting_prompts_func(examples):
 else:
     dataset = load_dataset(source, split = "train")
 
-dataset = dataset.map(formatting_prompts_func, batched = True)
+dataset = dataset.map(formatting_prompts_func, batched=True)
 
 trainer = SFTTrainer(
     model=model,
     train_dataset=dataset,
     dataset_text_field="text",
     max_seq_length=max_seq_length,
+    data_collator=DataCollatorForSeq2Seq(tokenizer=tokenizer),
     tokenizer=tokenizer,
     dataset_num_proc = 2,
     packing = cfg.get('packing'), # Can make training 5x faster for short sequences.
@@ -87,8 +90,8 @@ def formatting_prompts_func(examples):
         warmup_steps=cfg.get('warmupSteps'),
         max_steps=cfg.get('maxSteps'),
         learning_rate = cfg.get('learningRate'),
-        fp16=not torch.cuda.is_bf16_supported(),
-        bf16=torch.cuda.is_bf16_supported(),
+        fp16=not is_bfloat16_supported(),
+        bf16=is_bfloat16_supported(),
         logging_steps=cfg.get('loggingSteps'),
         optim=cfg.get('optimizer'),
         weight_decay = cfg.get('weightDecay'),
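
For context on the two Python changes: DataCollatorForSeq2Seq pads each batch
dynamically to its longest example at collation time, and Unsloth's
is_bfloat16_supported() helper replaces the raw torch.cuda.is_bf16_supported()
check so that exactly one of fp16/bf16 is enabled. A minimal sketch of the
resulting trainer setup, assuming model, tokenizer, and dataset are already
prepared as in the script above; the numeric values and output_dir below are
illustrative assumptions, not from the commit:

    # Sketch only: model, tokenizer, and dataset come from earlier in the script.
    from unsloth import is_bfloat16_supported
    from transformers import TrainingArguments, DataCollatorForSeq2Seq
    from trl import SFTTrainer

    trainer = SFTTrainer(
        model=model,
        train_dataset=dataset,
        dataset_text_field="text",
        max_seq_length=2048,  # illustrative value
        # pad each batch to its longest sequence at collation time
        data_collator=DataCollatorForSeq2Seq(tokenizer=tokenizer),
        tokenizer=tokenizer,
        args=TrainingArguments(
            output_dir="outputs",           # illustrative value
            per_device_train_batch_size=2,  # illustrative value
            max_steps=60,                   # illustrative value
            # enable exactly one mixed-precision mode
            fp16=not is_bfloat16_supported(),
            bf16=is_bfloat16_supported(),
        ),
    )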

test/aikitfile-unsloth.yaml (1 addition, 1 deletion)
@@ -1,6 +1,6 @@
 #syntax=aikit:test
 apiVersion: v1alpha1
-baseModel: unsloth/llama-3-8b-bnb-4bit
+baseModel: unsloth/Meta-Llama-3.1-8B
 datasets:
   - source: "yahma/alpaca-cleaned"
     type: alpaca
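
The test config now points at unsloth/Meta-Llama-3.1-8B which, unlike the old
llama-3-8b-bnb-4bit repo, is not a pre-quantized 4-bit checkpoint by its name.
A minimal sketch of loading such a base model with Unsloth; the max_seq_length
and load_in_4bit values are illustrative assumptions, not aikit's actual
settings:

    # Sketch only: parameter values are assumptions for illustration.
    from unsloth import FastLanguageModel

    model, tokenizer = FastLanguageModel.from_pretrained(
        model_name="unsloth/Meta-Llama-3.1-8B",
        max_seq_length=2048,  # illustrative value
        dtype=None,           # let Unsloth auto-select (bf16 where supported)
        load_in_4bit=True,    # quantize on load rather than using a -bnb-4bit repo
    )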
