Skip to content

Commit c50f46c

Browse files
committed
llama.cpp updated: renamed enums to match new upstream API (LLAMA_SPLIT_* → LLAMA_SPLIT_MODE_*, LLAMA_ROPE_SCALING_* → LLAMA_ROPE_SCALING_TYPE_*)
1 parent 28a3b38 commit c50f46c

File tree

2 files changed

+7
-7
lines changed

2 files changed

+7
-7
lines changed

llama_ros/llama_cpp

llama_ros/src/llama_node.cpp

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -203,20 +203,20 @@ void LlamaNode::load_params(struct gpt_params &params) {
203203

204204
// split mode
205205
if (split_mode == "none") {
206-
params.split_mode = LLAMA_SPLIT_NONE;
206+
params.split_mode = LLAMA_SPLIT_MODE_NONE;
207207
} else if (split_mode == "layer") {
208-
params.split_mode = LLAMA_SPLIT_LAYER;
208+
params.split_mode = LLAMA_SPLIT_MODE_LAYER;
209209
} else if (split_mode == "row") {
210-
params.split_mode = LLAMA_SPLIT_ROW;
210+
params.split_mode = LLAMA_SPLIT_MODE_ROW;
211211
}
212212

213213
// rope_scaling_type
214214
if (rope_scaling_type == "none") {
215-
params.rope_scaling_type = LLAMA_ROPE_SCALING_NONE;
215+
params.rope_scaling_type = LLAMA_ROPE_SCALING_TYPE_NONE;
216216
} else if (rope_scaling_type == "linear") {
217-
params.rope_scaling_type = LLAMA_ROPE_SCALING_LINEAR;
217+
params.rope_scaling_type = LLAMA_ROPE_SCALING_TYPE_LINEAR;
218218
} else if (rope_scaling_type == "yarn") {
219-
params.rope_scaling_type = LLAMA_ROPE_SCALING_YARN;
219+
params.rope_scaling_type = LLAMA_ROPE_SCALING_TYPE_YARN;
220220
}
221221

222222
// numa

0 commit comments

Comments (0)