Skip to content

Commit

Permalink
Fixed system prompt extraction for Bedrock clients
Browse files Browse the repository at this point in the history
  • Loading branch information
kevinmessiaen committed May 22, 2024
1 parent c4cad71 commit 69dc794
Showing 1 changed file with 4 additions and 8 deletions.
12 changes: 4 additions & 8 deletions giskard/llm/client/bedrock.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,16 +39,12 @@ def complete(
if "claude-3" not in self.model:
raise LLMConfigurationError(f"Only claude-3 models are supported as of now, got {self.model}")

# extract system prompt from messages
system_prompt = ""
if len(messages) > 1:
if messages[0].role.lower() == "user" and messages[1].role.lower() == "user":
system_prompt = messages[0].content
messages = messages[1:]

# Create the messages format needed for bedrock specifically
input_msg_prompt = []
system_prompts = []
for msg in messages:
if msg.role.lower() == "system":
system_prompts = system_prompts.append(msg.content)
if msg.role.lower() == "assistant":
input_msg_prompt.append({"role": "assistant", "content": [{"type": "text", "text": msg.content}]})
else:
Expand All @@ -60,7 +56,7 @@ def complete(
"anthropic_version": "bedrock-2023-05-31",
"max_tokens": max_tokens,
"temperature": temperature,
"system": system_prompt,
"system": "\n".join(system_prompts),
"messages": input_msg_prompt,
}
)
Expand Down

0 comments on commit 69dc794

Please sign in to comment.