
Commit

Fixed incorrect parameter name (openai#1378)
assert6 committed Jan 3, 2024
1 parent ded9382 commit 1dd2ea2
Showing 1 changed file with 4 additions and 4 deletions.
evals/completion_fns/langchain_llm.py (4 additions & 4 deletions)
@@ -66,16 +66,16 @@ def _convert_dict_to_langchain_message(_dict) -> BaseMessage:
 
 
 class LangChainChatModelCompletionFn(CompletionFn):
-    def __init__(self, llm: str, llm_kwargs: Optional[dict] = None, **kwargs) -> None:
+    def __init__(self, llm: str, chat_model_kwargs: Optional[dict] = None, **kwargs) -> None:
         # Import and resolve self.llm to an instance of llm argument here,
         # assuming it's always a subclass of BaseLLM
-        if llm_kwargs is None:
-            llm_kwargs = {}
+        if chat_model_kwargs is None:
+            chat_model_kwargs = {}
         module = importlib.import_module("langchain.chat_models")
         LLMClass = getattr(module, llm)
 
         if issubclass(LLMClass, BaseChatModel):
-            self.llm = LLMClass(**llm_kwargs)
+            self.llm = LLMClass(**chat_model_kwargs)
         else:
             raise ValueError(f"{llm} is not a subclass of BaseChatModel")
 
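
For reference, a minimal usage sketch of the renamed keyword argument. The constructor signature and import path follow the diff above; the chat model class name "ChatOpenAI" and the temperature value are illustrative assumptions, not part of the commit.

    # Hypothetical usage sketch; class name and kwargs are illustrative.
    from evals.completion_fns.langchain_llm import LangChainChatModelCompletionFn

    completion_fn = LangChainChatModelCompletionFn(
        llm="ChatOpenAI",  # class name resolved via getattr(langchain.chat_models, llm)
        chat_model_kwargs={"temperature": 0.0},  # forwarded as LLMClass(**chat_model_kwargs)
    )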
