diff --git a/README.md b/README.md
index 726ceac8fa..9868a03206 100644
--- a/README.md
+++ b/README.md
@@ -14,6 +14,29 @@ Embedchain is a framework to easily create LLM powered bots over any dataset. If
 pip install embedchain
 ```
 
+## 🔥 Latest
+
+- **[2023/07/19]** Released support for the 🦙 `llama2` model. Start creating your `llama2`-based bots like this:
+
+  ```python
+  import os
+
+  from embedchain import Llama2App
+
+  os.environ['REPLICATE_API_TOKEN'] = "REPLICATE API TOKEN"
+
+  zuck_bot = Llama2App()
+
+  # Embed your data
+  zuck_bot.add("youtube_video", "https://www.youtube.com/watch?v=Ff4fRgnuFgQ")
+  zuck_bot.add("web_page", "https://en.wikipedia.org/wiki/Mark_Zuckerberg")
+
+  # Nice, your bot is ready now. Start asking questions to your bot.
+  zuck_bot.query("Who is Mark Zuckerberg?")
+  # Answer: Mark Zuckerberg is an American internet entrepreneur and business magnate. He is the co-founder and CEO of Facebook.
+  ```
+
+
 ## 🔍 Demo
 
 Try out embedchain in your browser:
diff --git a/docs/advanced/app_types.mdx b/docs/advanced/app_types.mdx
index 518692e5b3..1c6895adaf 100644
--- a/docs/advanced/app_types.mdx
+++ b/docs/advanced/app_types.mdx
@@ -23,6 +23,35 @@ import os
 os.environ["OPENAI_API_KEY"] = "sk-xxxx"
 ```
 
+### Llama2App
+
+```python
+import os
+
+from embedchain import Llama2App
+
+os.environ['REPLICATE_API_TOKEN'] = "REPLICATE API TOKEN"
+
+zuck_bot = Llama2App()
+
+# Embed your data
+zuck_bot.add("youtube_video", "https://www.youtube.com/watch?v=Ff4fRgnuFgQ")
+zuck_bot.add("web_page", "https://en.wikipedia.org/wiki/Mark_Zuckerberg")
+
+# Nice, your bot is ready now. Start asking questions to your bot.
+zuck_bot.query("Who is Mark Zuckerberg?")
+# Answer: Mark Zuckerberg is an American internet entrepreneur and business magnate. He is the co-founder and CEO of Facebook. Born in 1984, he dropped out of Harvard University to focus on his social media platform, which has since grown to become one of the largest and most influential technology companies in the world.
+
+# Enable web search for your bot
+zuck_bot.online = True  # enable internet access for the bot
+zuck_bot.query("Who owns the new Threads app and when was it founded?")
+# Answer: Based on the context provided, the new Threads app is owned by Meta, the parent company of Facebook, Instagram, and WhatsApp.
+```
+
+- `Llama2App` uses Replicate's hosted Llama 2 model, which is a paid service. You can get the `REPLICATE_API_TOKEN` by registering on [their website](https://replicate.com/account).
+- `Llama2App` uses OpenAI's embedding model to create embeddings for chunks, so make sure that you have an OpenAI account and an API key. If you don't have an API key, you can create one by visiting [this link](https://platform.openai.com/account/api-keys).
+
+
 ### OpenSourceApp
 
 ```python
diff --git a/embedchain/__init__.py b/embedchain/__init__.py
index 371713f126..7fd6ce441b 100644
--- a/embedchain/__init__.py
+++ b/embedchain/__init__.py
@@ -4,6 +4,7 @@
 
 from embedchain.apps.App import App  # noqa: F401
 from embedchain.apps.CustomApp import CustomApp  # noqa: F401
+from embedchain.apps.Llama2App import Llama2App  # noqa: F401
 from embedchain.apps.OpenSourceApp import OpenSourceApp  # noqa: F401
 from embedchain.apps.PersonApp import (PersonApp,  # noqa: F401
                                        PersonOpenSourceApp)
diff --git a/embedchain/apps/Llama2App.py b/embedchain/apps/Llama2App.py
new file mode 100644
index 0000000000..12e88a2996
--- /dev/null
+++ b/embedchain/apps/Llama2App.py
@@ -0,0 +1,36 @@
+import os
+
+from langchain.llms import Replicate
+
+from embedchain.config import AppConfig
+from embedchain.embedchain import EmbedChain
+
+
+class Llama2App(EmbedChain):
+    """
+    The EmbedChain Llama2App class.
+    Has two functions: add and query.
+
+    add(data_type, url): adds the data from the given URL to the vector db.
+    query(query): finds an answer to the given query using the vector database and the LLM.
+    """
+
+    def __init__(self, config: AppConfig = None):
+        """
+        :param config: AppConfig instance to load as configuration. Optional.
+        """
+        if "REPLICATE_API_TOKEN" not in os.environ:
+            raise ValueError("Please set the REPLICATE_API_TOKEN environment variable to your Replicate API token.")
+
+        if config is None:
+            config = AppConfig()
+
+        super().__init__(config)
+
+    def get_llm_model_answer(self, prompt, config: AppConfig = None):
+        # TODO: Move the model and other inputs into config
+        llm = Replicate(
+            model="a16z-infra/llama13b-v2-chat:df7690f1994d94e96ad9d568eac121aecf50684a0b0963b25a41cc40061269e5",
+            input={"temperature": 0.75, "max_length": 500, "top_p": 1},
+        )
+        return llm(prompt)
diff --git a/setup.py b/setup.py
index adab6db864..6a8165333e 100644
--- a/setup.py
+++ b/setup.py
@@ -5,7 +5,7 @@
 
 setuptools.setup(
     name="embedchain",
-    version="0.0.24",
+    version="0.0.25",
     author="Taranjeet Singh",
     author_email="reachtotj@gmail.com",
     description="embedchain is a framework to easily create LLM powered bots over any dataset",  # noqa:E501
@@ -34,6 +34,8 @@
         "sentence_transformers",
         "docx2txt",
         "pydantic==1.10.8",
+        "replicate==0.9.0",
+        "duckduckgo-search==3.8.4",
     ],
     extras_require={"dev": ["black", "ruff", "isort", "pytest"]},
 )
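For reference, a minimal end-to-end sketch of `Llama2App` usage, assuming the `add`/`query` API shown above. Per the docs in this diff, both `REPLICATE_API_TOKEN` (for the Replicate-hosted Llama 2 model) and `OPENAI_API_KEY` (for the OpenAI embeddings applied to chunks) must be set; the credential values below are placeholders.

```python
import os

from embedchain import Llama2App

# Placeholder credentials -- both are needed per the docs above:
# REPLICATE_API_TOKEN drives the Replicate-hosted Llama 2 model,
# OPENAI_API_KEY is used to embed the added chunks with OpenAI's embedding model.
os.environ["REPLICATE_API_TOKEN"] = "your-replicate-api-token"
os.environ["OPENAI_API_KEY"] = "sk-xxxx"

bot = Llama2App()

# Embed a source, then ask a question against it.
bot.add("web_page", "https://en.wikipedia.org/wiki/Mark_Zuckerberg")
print(bot.query("Who is Mark Zuckerberg?"))
```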