@@ -62,7 +62,7 @@ def createFinetuneRequest(
     learning_rate: float | None = 0.00001,
     lr_scheduler_type: Literal["linear", "cosine"] = "linear",
     min_lr_ratio: float = 0.0,
-    num_cycles: float = 0.5,
+    scheduler_num_cycles: float = 0.5,
     warmup_ratio: float = 0.0,
     max_grad_norm: float = 1.0,
     weight_decay: float = 0.0,
@@ -138,12 +138,12 @@ def createFinetuneRequest(
     lrScheduler: FinetuneLRScheduler = FinetuneLRScheduler(lr_scheduler_type="linear")

     if lr_scheduler_type == "cosine":
-        if num_cycles <= 0.0:
+        if scheduler_num_cycles <= 0.0:
             raise ValueError("Number of cycles should be greater than 0")

         lrScheduler = FinetuneCosineLRScheduler(
             lr_scheduler_args=FinetuneCosineLRSchedulerArgs(
-                min_lr_ratio=min_lr_ratio, num_cycles=num_cycles
+                min_lr_ratio=min_lr_ratio, num_cycles=scheduler_num_cycles
             ),
         )
     else:
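
The default of 0.5 preserves the existing behavior: half a cosine cycle that decays from the peak learning rate down to min_lr_ratio over the run. As an illustrative sketch only (an assumption about how a fractional cycle count typically enters a cosine schedule, in the style of the usual cosine-with-warmup formula; this is not code from this repo or its backend):

    import math

    def cosine_lr_multiplier(progress: float, num_cycles: float = 0.5, min_lr_ratio: float = 0.0) -> float:
        """progress is the fraction of post-warmup training completed, in [0, 1]."""
        cosine = 0.5 * (1.0 + math.cos(math.pi * 2.0 * num_cycles * progress))
        # Rescale so the multiplier never drops below min_lr_ratio.
        return min_lr_ratio + (1.0 - min_lr_ratio) * cosine

    # With num_cycles=0.5 the multiplier decays monotonically from 1.0 at the start
    # of training to min_lr_ratio at the end; larger values add full cosine cycles.
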
@@ -263,7 +263,7 @@ def create(
         learning_rate: float | None = 0.00001,
         lr_scheduler_type: Literal["linear", "cosine"] = "linear",
         min_lr_ratio: float = 0.0,
-        num_cycles: float = 0.5,
+        scheduler_num_cycles: float = 0.5,
         warmup_ratio: float = 0.0,
         max_grad_norm: float = 1.0,
         weight_decay: float = 0.0,
@@ -301,7 +301,7 @@ def create(
             lr_scheduler_type (Literal["linear", "cosine"]): Learning rate scheduler type. Defaults to "linear".
             min_lr_ratio (float, optional): Min learning rate ratio of the initial learning rate for
                 the learning rate scheduler. Defaults to 0.0.
-            num_cycles (float, optional): Number or fraction of cycles for the cosine learning rate scheduler. Defaults to 0.5.
+            scheduler_num_cycles (float, optional): Number or fraction of cycles for the cosine learning rate scheduler. Defaults to 0.5.
             warmup_ratio (float, optional): Warmup ratio for the learning rate scheduler.
             max_grad_norm (float, optional): Max gradient norm. Defaults to 1.0, set to 0 to disable.
             weight_decay (float, optional): Weight decay. Defaults to 0.0.
@@ -359,7 +359,7 @@ def create(
             learning_rate=learning_rate,
             lr_scheduler_type=lr_scheduler_type,
             min_lr_ratio=min_lr_ratio,
-            num_cycles=num_cycles,
+            scheduler_num_cycles=scheduler_num_cycles,
             warmup_ratio=warmup_ratio,
             max_grad_norm=max_grad_norm,
             weight_decay=weight_decay,
@@ -642,7 +642,7 @@ async def create(
         learning_rate: float | None = 0.00001,
         lr_scheduler_type: Literal["linear", "cosine"] = "linear",
         min_lr_ratio: float = 0.0,
-        num_cycles: float = 0.5,
+        scheduler_num_cycles: float = 0.5,
         warmup_ratio: float = 0.0,
         max_grad_norm: float = 1.0,
         weight_decay: float = 0.0,
@@ -680,7 +680,7 @@ async def create(
             lr_scheduler_type (Literal["linear", "cosine"]): Learning rate scheduler type. Defaults to "linear".
             min_lr_ratio (float, optional): Min learning rate ratio of the initial learning rate for
                 the learning rate scheduler. Defaults to 0.0.
-            num_cycles (float, optional): Number or fraction of cycles for the cosine learning rate scheduler. Defaults to 0.5.
+            scheduler_num_cycles (float, optional): Number or fraction of cycles for the cosine learning rate scheduler. Defaults to 0.5.
             warmup_ratio (float, optional): Warmup ratio for the learning rate scheduler.
             max_grad_norm (float, optional): Max gradient norm. Defaults to 1.0, set to 0 to disable.
             weight_decay (float, optional): Weight decay. Defaults to 0.0.
@@ -739,7 +739,7 @@ async def create(
             learning_rate=learning_rate,
             lr_scheduler_type=lr_scheduler_type,
             min_lr_ratio=min_lr_ratio,
-            num_cycles=num_cycles,
+            scheduler_num_cycles=scheduler_num_cycles,
             warmup_ratio=warmup_ratio,
             max_grad_norm=max_grad_norm,
             weight_decay=weight_decay,
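
After this rename, callers of the public create() methods pass scheduler_num_cycles instead of num_cycles. A hypothetical usage sketch, assuming the SDK's Together client entry point; the file ID and model name are placeholders, not values taken from this diff:

    from together import Together  # assumed client entry point for this SDK

    client = Together()  # reads TOGETHER_API_KEY from the environment

    job = client.fine_tuning.create(
        training_file="file-abc123",          # placeholder uploaded-file ID
        model="meta-llama/Meta-Llama-3-8B",   # placeholder base model
        lr_scheduler_type="cosine",
        min_lr_ratio=0.1,
        scheduler_num_cycles=0.5,             # renamed from num_cycles
    )
    print(job.id)  # ID of the newly created fine-tuning job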