Code reformatting
kevinmessiaen committed Jun 4, 2024
1 parent c53d475 commit df174e0
Showing 1 changed file with 3 additions and 3 deletions.
6 changes: 3 additions & 3 deletions giskard/llm/client/bedrock.py
@@ -69,7 +69,7 @@ def __init__(
         anthropic_version: str = "bedrock-2023-05-31",
     ):
         # only supporting claude 3
-        if "claude-3" not in self.model:
+        if "claude-3" not in model:
             raise LLMConfigurationError(f"Only claude-3 models are supported as of now, got {self.model}")
 
         super().__init__(bedrock_runtime_client, model)
@@ -111,7 +111,7 @@ def _format_body(
                 "max_tokens": max_tokens,
                 "temperature": temperature,
                 "system": "\n".join(system_prompts),
-                "messages": messages,
+                "messages": input_msg_prompt,
             }
         )
 
@@ -131,7 +131,7 @@ def _parse_completion(self, completion, caller_id: Optional[str] = None) -> Chat
 class LLamaBedrockClient(BaseBedrockClient):
     def __init__(self, bedrock_runtime_client, model: str = "meta.llama3-8b-instruct-v1:0"):
         # only supporting llama
-        if "llama" not in self.model:
+        if "llama" not in model:
             raise LLMConfigurationError(f"Only Llama models are supported as of now, got {self.model}")
 
         super().__init__(bedrock_runtime_client, model)
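
Note on the first and third hunks: they move the model-name check in __init__ from self.model to the model argument. The sketch below (class names and the default model ID are illustrative assumptions, not the giskard source) shows why this matters: self.model only exists once the base __init__ has run, so reading it before super().__init__() raises AttributeError.

# Minimal sketch, not the giskard implementation. Class names and the default
# model ID are assumptions; only the pattern mirrors the diff above.
class BaseBedrockLikeClient:
    def __init__(self, bedrock_runtime_client, model: str):
        self._client = bedrock_runtime_client
        self.model = model  # self.model is only defined after this assignment


class ClaudeLikeClient(BaseBedrockLikeClient):
    def __init__(self, bedrock_runtime_client, model: str = "anthropic.claude-3-sonnet-20240229-v1:0"):
        # Reading self.model here would raise AttributeError, because the base
        # __init__ (which sets self.model) has not run yet; check the argument.
        if "claude-3" not in model:
            raise ValueError(f"Only claude-3 models are supported, got {model}")
        super().__init__(bedrock_runtime_client, model)


client = ClaudeLikeClient(bedrock_runtime_client=None)
print(client.model)  # anthropic.claude-3-sonnet-20240229-v1:0

As a side observation, the raise lines shown in the diff still interpolate {self.model}; if that branch were hit inside __init__ before the attribute is set, the same AttributeError would occur.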

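The second hunk only changes which local variable is bound to the "messages" key of the request body. For reference, here is a hedged sketch of the body shape this method appears to build for Bedrock's Anthropic messages API, using the keys visible in the diff; the values are placeholders, not giskard defaults.

import json

# Illustrative request body for Bedrock's Anthropic messages API, mirroring
# the keys in the second hunk. All values are placeholder assumptions.
body = json.dumps(
    {
        "anthropic_version": "bedrock-2023-05-31",
        "max_tokens": 1000,
        "temperature": 0.0,
        "system": "You are a helpful assistant.",
        "messages": [{"role": "user", "content": [{"type": "text", "text": "Hello"}]}],
    }
)
print(body)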