Commit fe8b7e7

committed .
1 parent eaed226 commit fe8b7e7

2 files changed: +2 -1 lines changed

configs/local_setup.yml (+1)

@@ -1,6 +1,7 @@
 # Suggested data paths when using GPT-NeoX locally
 {
   "global_num_gpus":1,
+  "num_gpus": 1,
   "data-path": "data/enwik8/enwik8_text_document",
 
   # or for weighted datasets:
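
For orientation, here is a minimal sketch (not part of this commit) of loading the config and checking that the two GPU-count keys agree on a single-GPU setup. The use of PyYAML and the expectation that "num_gpus" should match "global_num_gpus" are assumptions drawn from this diff, not rules stated by the repository.

# Illustrative only: the equality check below is an assumption based on this
# diff, not a constraint documented by GPT-NeoX.
import yaml

with open("configs/local_setup.yml") as f:
    cfg = yaml.safe_load(f)  # NeoX configs are brace-delimited YAML mappings

print(cfg.get("global_num_gpus"), cfg.get("num_gpus"))  # expected after this commit: 1 1
assert cfg.get("global_num_gpus") == cfg.get("num_gpus") == 1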

megatron/training.py (+1, -1)

@@ -613,7 +613,7 @@ def setup_model_and_optimizer(neox_args, use_cache=False, iteration=None):
 
     params=neox_args.deepspeed_config
     params["gradient_accumulation_steps"]=1 # JED figure out what is broken
-    params["train_batch_size"]=16 # JED figure out what is broken
+    #params["train_batch_size"]=16 # JED figure out what is broken
 
 
     model, optimizer, _, lr_scheduler = deepspeed.initialize(
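
The override that was commented out interacts with DeepSpeed's batch-size bookkeeping: DeepSpeed requires train_batch_size == train_micro_batch_size_per_gpu * gradient_accumulation_steps * data-parallel world size, so pinning train_batch_size to 16 while forcing gradient_accumulation_steps to 1 only stays consistent when the per-GPU micro batch times the world size equals 16. The sketch below is illustrative, not code from megatron/training.py; check_batch_config and the example numbers are hypothetical.

# Hypothetical helper illustrating DeepSpeed's consistency rule:
#   train_batch_size == train_micro_batch_size_per_gpu
#                       * gradient_accumulation_steps
#                       * data-parallel world size
def check_batch_config(params, world_size):
    micro = params.get("train_micro_batch_size_per_gpu")
    gas = params.get("gradient_accumulation_steps", 1)
    train = params.get("train_batch_size")
    if None in (micro, train):
        return  # with one value missing, DeepSpeed can derive it instead
    expected = micro * gas * world_size
    if train != expected:
        raise ValueError(
            f"train_batch_size={train}, but {micro} (micro) * {gas} (grad accum)"
            f" * {world_size} (world size) = {expected}"
        )

# Assumed numbers for a single-GPU run like configs/local_setup.yml describes:
params = {
    "train_micro_batch_size_per_gpu": 4,  # assumed value, not from the commit
    "gradient_accumulation_steps": 1,     # forced to 1 a few lines above
    "train_batch_size": 16,               # the override that was commented out
}
try:
    check_batch_config(params, world_size=1)
except ValueError as err:
    print("inconsistent DeepSpeed batch config:", err)  # 16 != 4 * 1 * 1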
