Skip to content

Commit

Permalink
fix: Fix typo in Llama3VisionAlphaChatHandler. Closes abetlen#1488
Browse files Browse the repository at this point in the history
  • Loading branch information
abetlen committed May 29, 2024
1 parent 91d05ab commit 165b4dc
Showing 1 changed file with 5 additions and 2 deletions.
7 changes: 5 additions & 2 deletions llama_cpp/llama_chat_format.py
Original file line number Diff line number Diff line change
Expand Up @@ -3098,7 +3098,7 @@ class NanoLlavaChatHandler(Llava15ChatHandler):
"{% endif %}"
)

class Llama3VisionAlpha(Llava15ChatHandler):
class Llama3VisionAlphaChatHandler(Llava15ChatHandler):
# question = "<image>" + q

# prompt = f"<|start_header_id|>user<|end_header_id|>\n\n{question}<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n"
Expand Down Expand Up @@ -3159,6 +3159,10 @@ class Llama3VisionAlpha(Llava15ChatHandler):
"{% endif %}"
)

# Backwards-compatible alias: the class was renamed to
# ``Llama3VisionAlphaChatHandler``; keep the old name importable so
# existing callers that reference ``Llama3VisionAlpha`` do not break.
Llama3VisionAlpha = Llama3VisionAlphaChatHandler


@register_chat_completion_handler("chatml-function-calling")
def chatml_function_calling(
llama: llama.Llama,
Expand Down Expand Up @@ -3193,7 +3197,6 @@ def chatml_function_calling(
llama_types.CreateChatCompletionResponse,
Iterator[llama_types.CreateChatCompletionStreamResponse],
]:
print(logprobs)
function_calling_template = (
"{% for message in messages %}"
"<|im_start|>{{ message.role }}\n"
Expand Down

0 comments on commit 165b4dc

Please sign in to comment.