Skip to content

Commit

Permalink
Ollama tool calls
Browse files Browse the repository at this point in the history
  • Loading branch information
ashpreetbedi committed May 2, 2024
1 parent d1e8d04 commit a1235f6
Show file tree
Hide file tree
Showing 3 changed files with 20 additions and 36 deletions.
4 changes: 4 additions & 0 deletions cookbook/llms/ollama/tools/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,10 @@ ollama pull llama3

ollama pull openhermes

ollama pull adrienbrault/nous-hermes2pro:Q8_0

ollama pull adrienbrault/nous-hermes2pro-llama3-8b:q8_0
```
```

Expand Down
38 changes: 9 additions & 29 deletions cookbook/llms/ollama/tools/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,17 +31,14 @@ def restart_assistant():


def main() -> None:
# Get LLM Model
llm_model = (
st.sidebar.selectbox("Select LLM", options=["llama3", "openhermes", "adrienbrault/nous-hermes2pro:Q8_0"])
or "llama3"
)
# Get LLM id
llm_id = st.sidebar.selectbox("Select LLM", options=["llama3", "hermes2pro-llama3"]) or "llama3"
# Set llm in session state
if "llm_model" not in st.session_state:
st.session_state["llm_model"] = llm_model
# Restart the assistant if llm_model changes
elif st.session_state["llm_model"] != llm_model:
st.session_state["llm_model"] = llm_model
if "llm_id" not in st.session_state:
st.session_state["llm_id"] = llm_id
# Restart the assistant if llm_id changes
elif st.session_state["llm_id"] != llm_id:
st.session_state["llm_id"] = llm_id
st.session_state["llm_updated"] = True
restart_assistant()

Expand Down Expand Up @@ -70,30 +67,13 @@ def main() -> None:
st.session_state["ddg_search_enabled"] = ddg_search
restart_assistant()

# Add tavily_search_enabled to session state
if "tavily_search_enabled" not in st.session_state:
st.session_state["tavily_search_enabled"] = False
# Get tavily_search_enabled from session state if set
tavily_search_enabled = st.session_state["tavily_search_enabled"]
# Checkbox for enabling tavily search
tavily_search = st.sidebar.checkbox(
"Enable Tavily Search",
value=tavily_search_enabled,
disabled=ddg_search,
help="Tavily Search is disabled if Web Search is enabled.",
)
if tavily_search_enabled != tavily_search:
st.session_state["tavily_search_enabled"] = tavily_search
restart_assistant()

# Get the assistant
local_assistant: Assistant
if "local_assistant" not in st.session_state or st.session_state["local_assistant"] is None:
logger.info(f"---*--- Creating {llm_model} Assistant ---*---")
logger.info(f"---*--- Creating {llm_id} Assistant ---*---")
local_assistant = get_local_assistant(
llm_model=llm_model,
llm_id=llm_id,
ddg_search=ddg_search_enabled,
tavily_search=tavily_search_enabled,
yfinance=yfinance_tools_enabled,
)
st.session_state["local_assistant"] = local_assistant
Expand Down
14 changes: 7 additions & 7 deletions cookbook/llms/ollama/tools/assistant.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,16 +3,14 @@
from typing import Any, List

from phi.assistant import Assistant
from phi.llm.ollama import OllamaTools
from phi.llm.ollama import Ollama
from phi.tools.duckduckgo import DuckDuckGo
from phi.tools.tavily import TavilyTools
from phi.tools.yfinance import YFinanceTools


def get_local_assistant(
llm_model: str = "llama3",
llm_id: str = "llama3",
ddg_search: bool = False,
tavily_search: bool = False,
yfinance: bool = False,
user_id: Optional[str] = None,
run_id: Optional[str] = None,
Expand All @@ -23,18 +21,20 @@ def get_local_assistant(
tools: List[Any] = []
if ddg_search:
tools.append(DuckDuckGo(fixed_max_results=3))
if tavily_search:
tools.append(TavilyTools())
if yfinance:
tools.append(
YFinanceTools(stock_price=True, stock_fundamentals=True, analyst_recommendations=True, company_news=True)
)

_llm_id = llm_id
if llm_id == "hermes2pro-llama3":
_llm_id = "adrienbrault/nous-hermes2pro-llama3-8b:q8_0"

assistant = Assistant(
name="local_assistant",
run_id=run_id,
user_id=user_id,
llm=OllamaTools(model=llm_model),
llm=Ollama(model=_llm_id),
tools=tools,
show_tool_calls=True,
# This setting tells the LLM to format messages in markdown
Expand Down

0 comments on commit a1235f6

Please sign in to comment.