forked from openai/evals
-
Notifications
You must be signed in to change notification settings - Fork 0
/
langchain_math.py
30 lines (21 loc) · 976 Bytes
/
langchain_math.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
import importlib
from typing import Optional
from langchain import OpenAI, LLMMathChain
from evals.prompt.base import CompletionPrompt
from evals.record import record_sampling
class LangChainCompletionResult:
    """Thin wrapper exposing a single raw LLM response through the
    completion-result interface that evals expects."""

    def __init__(self, response) -> None:
        # Raw response text as produced by the chain.
        self.response = response

    def get_completions(self) -> list[str]:
        # Evals consume completions as a list; we always hold exactly one,
        # trimmed of surrounding whitespace.
        trimmed = self.response.strip()
        return [trimmed]
class LangChainMathChainCompletionFn:
    """Completion function that answers math prompts by delegating to
    LangChain's LLMMathChain (backed by an OpenAI LLM at temperature 0)."""

    def __init__(self, **kwargs) -> None:
        # temperature=0 for deterministic math answers.
        llm = OpenAI(temperature=0)
        self.llm_math = LLMMathChain(llm=llm)

    def __call__(self, prompt, **kwargs) -> LangChainCompletionResult:
        prompt = CompletionPrompt(prompt).to_formatted_prompt()
        response = self.llm_math.run(prompt)
        # The LangChain response comes with "Answer: " ahead of it.
        # Use removeprefix, NOT str.strip("Answer:"): strip treats its
        # argument as a character set and would also chew matching
        # characters off the *end* (and start) of the actual answer,
        # e.g. "Answer: seven" -> "v" under the old code.
        response = response.removeprefix("Answer:").strip()
        record_sampling(prompt=prompt, sampled=response)
        return LangChainCompletionResult(response)