diff --git a/cookbook/llms/ollama/tools/README.md b/cookbook/llms/ollama/tools/README.md
index 3baccac22..b43eb4b7b 100644
--- a/cookbook/llms/ollama/tools/README.md
+++ b/cookbook/llms/ollama/tools/README.md
@@ -13,6 +13,10 @@
 ollama pull llama3
 
 ollama pull openhermes
 
+ollama pull adrienbrault/nous-hermes2pro:Q8_0
+
+ollama pull adrienbrault/nous-hermes2pro-llama3-8b:q8_0
+
 ollama pull adrienbrault/nous-hermes2pro:Q8_0
 ```
diff --git a/cookbook/llms/ollama/tools/app.py b/cookbook/llms/ollama/tools/app.py
index d0db70b4b..72908db78 100644
--- a/cookbook/llms/ollama/tools/app.py
+++ b/cookbook/llms/ollama/tools/app.py
@@ -31,17 +31,14 @@ def restart_assistant():
 
 
 def main() -> None:
-    # Get LLM Model
-    llm_model = (
-        st.sidebar.selectbox("Select LLM", options=["llama3", "openhermes", "adrienbrault/nous-hermes2pro:Q8_0"])
-        or "llama3"
-    )
+    # Get LLM id
+    llm_id = st.sidebar.selectbox("Select LLM", options=["llama3", "hermes2pro-llama3"]) or "llama3"
 
     # Set llm in session state
-    if "llm_model" not in st.session_state:
-        st.session_state["llm_model"] = llm_model
-    # Restart the assistant if llm_model changes
-    elif st.session_state["llm_model"] != llm_model:
-        st.session_state["llm_model"] = llm_model
+    if "llm_id" not in st.session_state:
+        st.session_state["llm_id"] = llm_id
+    # Restart the assistant if llm_id changes
+    elif st.session_state["llm_id"] != llm_id:
+        st.session_state["llm_id"] = llm_id
         st.session_state["llm_updated"] = True
         restart_assistant()
@@ -70,30 +67,13 @@ def main() -> None:
             st.session_state["ddg_search_enabled"] = ddg_search
             restart_assistant()
 
-    # Add tavily_search_enabled to session state
-    if "tavily_search_enabled" not in st.session_state:
-        st.session_state["tavily_search_enabled"] = False
-    # Get tavily_search_enabled from session state if set
-    tavily_search_enabled = st.session_state["tavily_search_enabled"]
-    # Checkbox for enabling tavily search
-    tavily_search = st.sidebar.checkbox(
-        "Enable Tavily Search",
-        value=tavily_search_enabled,
-        disabled=ddg_search,
-        help="Tavily Search is disabled if Web Search is enabled.",
-    )
-    if tavily_search_enabled != tavily_search:
-        st.session_state["tavily_search_enabled"] = tavily_search
-        restart_assistant()
-
     # Get the assistant
     local_assistant: Assistant
    if "local_assistant" not in st.session_state or st.session_state["local_assistant"] is None:
-        logger.info(f"---*--- Creating {llm_model} Assistant ---*---")
+        logger.info(f"---*--- Creating {llm_id} Assistant ---*---")
         local_assistant = get_local_assistant(
-            llm_model=llm_model,
+            llm_id=llm_id,
             ddg_search=ddg_search_enabled,
-            tavily_search=tavily_search_enabled,
             yfinance=yfinance_tools_enabled,
         )
         st.session_state["local_assistant"] = local_assistant
diff --git a/cookbook/llms/ollama/tools/assistant.py b/cookbook/llms/ollama/tools/assistant.py
index 7c820e0e4..7904aefea 100644
--- a/cookbook/llms/ollama/tools/assistant.py
+++ b/cookbook/llms/ollama/tools/assistant.py
@@ -3,16 +3,14 @@
 from typing import Any, List
 
 from phi.assistant import Assistant
-from phi.llm.ollama import OllamaTools
+from phi.llm.ollama import Ollama
 from phi.tools.duckduckgo import DuckDuckGo
-from phi.tools.tavily import TavilyTools
 from phi.tools.yfinance import YFinanceTools
 
 
 def get_local_assistant(
-    llm_model: str = "llama3",
+    llm_id: str = "llama3",
     ddg_search: bool = False,
-    tavily_search: bool = False,
     yfinance: bool = False,
     user_id: Optional[str] = None,
     run_id: Optional[str] = None,
@@ -23,18 +21,20 @@
     tools: List[Any] = []
     if ddg_search:
         tools.append(DuckDuckGo(fixed_max_results=3))
-    if tavily_search:
-        tools.append(TavilyTools())
     if yfinance:
         tools.append(
             YFinanceTools(stock_price=True, stock_fundamentals=True, analyst_recommendations=True, company_news=True)
         )
 
+    _llm_id = llm_id
+    if llm_id == "hermes2pro-llama3":
+        _llm_id = "adrienbrault/nous-hermes2pro-llama3-8b:q8_0"
+
     assistant = Assistant(
         name="local_assistant",
         run_id=run_id,
         user_id=user_id,
-        llm=OllamaTools(model=llm_model),
+        llm=Ollama(model=_llm_id),
         tools=tools,
         show_tool_calls=True,
         # This setting tells the LLM to format messages in markdown
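
Not part of the patch: a minimal usage sketch of the updated helper, assuming it is run from the cookbook directory so `assistant.py` is importable, that the `adrienbrault/nous-hermes2pro-llama3-8b:q8_0` model has been pulled, and that `Assistant.print_response` behaves as in other phidata cookbooks.

```python
# Illustrative sketch only -- not part of the diff above.
# Assumes the working directory is cookbook/llms/ollama/tools/ and the
# selected model has already been pulled with `ollama pull`.
from assistant import get_local_assistant

# "hermes2pro-llama3" is remapped inside get_local_assistant to
# "adrienbrault/nous-hermes2pro-llama3-8b:q8_0" before Ollama(model=...) is built.
local_assistant = get_local_assistant(
    llm_id="hermes2pro-llama3",
    ddg_search=True,  # registers DuckDuckGo(fixed_max_results=3)
    yfinance=True,    # registers YFinanceTools with prices, fundamentals, recommendations, news
)

# print_response is assumed to stream a markdown-formatted answer.
local_assistant.print_response("Summarize the latest analyst recommendations for NVDA.")
```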