Skip to content

Commit

Permalink
fix(factory.py): correctly apply bos token for llama3 instruct prompt template
Browse files Browse the repository at this point in the history
  • Loading branch information
krrishdholakia committed Apr 26, 2024
1 parent 5bd50a6 commit ccb864b
Showing 1 changed file with 2 additions and 8 deletions.
10 changes: 2 additions & 8 deletions litellm/llms/prompt_templates/factory.py
Original file line number Diff line number Diff line change
Expand Up @@ -1350,11 +1350,8 @@ def prompt_factory(
return llama_2_chat_pt(messages=messages)
elif "llama3" in model and "instruct" in model:
return hf_chat_template(
model=model,
model="meta-llama/Meta-Llama-3-8B-Instruct",
messages=messages,
chat_template=known_tokenizer_config[ # type: ignore
"meta-llama/Meta-Llama-3-8B-Instruct"
]["tokenizer"]["chat_template"],
)
elif custom_llm_provider == "perplexity":
for message in messages:
Expand All @@ -1369,11 +1366,8 @@ def prompt_factory(
"meta-llama/llama-3" in model or "meta-llama-3" in model
) and "instruct" in model:
return hf_chat_template(
model=model,
model="meta-llama/Meta-Llama-3-8B-Instruct",
messages=messages,
chat_template=known_tokenizer_config[ # type: ignore
"meta-llama/Meta-Llama-3-8B-Instruct"
]["tokenizer"]["chat_template"],
)
elif (
"tiiuae/falcon" in model
Expand Down

0 comments on commit ccb864b

Please sign in to comment.