forked from mem0ai/mem0
-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
[feat]: add support for the llama2 model (mem0ai#331)
- Loading branch information
Showing
5 changed files
with
92 additions
and
1 deletion.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,36 @@ | ||
import os | ||
|
||
from langchain.llms import Replicate | ||
|
||
from embedchain.config import AppConfig | ||
from embedchain.embedchain import EmbedChain | ||
|
||
|
||
class Llama2App(EmbedChain):
    """
    The EmbedChain Llama2App class.

    Has two functions: add and query.
    add(data_type, url): adds the data from the given URL to the vector db.
    query(query): finds answer to the given query using vector database and LLM.
    """

    def __init__(self, config: AppConfig = None):
        """
        :param config: AppConfig instance to load as configuration. Optional;
            a default AppConfig is created when omitted.
        :raises ValueError: if the REPLICATE_API_TOKEN environment variable
            is not set (required by the Replicate-hosted Llama 2 model).
        """
        if "REPLICATE_API_TOKEN" not in os.environ:
            # Bug fix: the original message told the user to set their
            # "OpenAI API key" — this check guards the Replicate API token.
            raise ValueError("Please set the REPLICATE_API_TOKEN environment variable to your Replicate API token.")

        if config is None:
            config = AppConfig()

        super().__init__(config)

    def get_llm_model_answer(self, prompt, config: AppConfig = None):
        """
        Run the prompt through the Replicate-hosted Llama 2 chat model and
        return its raw text answer.

        :param prompt: the prompt string sent to the LLM.
        :param config: unused here; kept for interface consistency with
            other EmbedChain apps.
        :return: the model's answer as returned by the Replicate client.
        """
        # TODO: Move the model identifier and generation parameters into config.
        llm = Replicate(
            model="a16z-infra/llama13b-v2-chat:df7690f1994d94e96ad9d568eac121aecf50684a0b0963b25a41cc40061269e5",
            input={"temperature": 0.75, "max_length": 500, "top_p": 1},
        )
        return llm(prompt)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -5,7 +5,7 @@ | |
|
||
setuptools.setup( | ||
name="embedchain", | ||
version="0.0.24", | ||
version="0.0.25", | ||
author="Taranjeet Singh", | ||
author_email="[email protected]", | ||
description="embedchain is a framework to easily create LLM powered bots over any dataset", # noqa:E501 | ||
|
@@ -34,6 +34,8 @@ | |
"sentence_transformers", | ||
"docx2txt", | ||
"pydantic==1.10.8", | ||
"replicate==0.9.0", | ||
"duckduckgo-search==3.8.4", | ||
], | ||
extras_require={"dev": ["black", "ruff", "isort", "pytest"]}, | ||
) |