Skip to content

Commit

Permalink
better approach for LLMs, minor changes, new models and fixes
Browse files Browse the repository at this point in the history
  • Loading branch information
Qewertyy committed Nov 19, 2023
1 parent e849f6f commit 73fb9d9
Show file tree
Hide file tree
Showing 14 changed files with 79 additions and 147 deletions.
7 changes: 4 additions & 3 deletions examples/async_bard.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,11 @@
import asyncio
from lexica import AsyncClient
from lexica.constants import languageModels

async def main(prompt: str) -> dict:
async def async_main(prompt: str) -> dict:
client = AsyncClient()
response = await client.bard(prompt)
response = await client.ChatCompletion(prompt,languageModels.bard)
return response

if __name__ == "__main__":
print(asyncio.run(main("hello, who are you?")))
print(asyncio.run(async_main("hello, who are you?")))
7 changes: 4 additions & 3 deletions examples/async_gpt.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,11 @@
import asyncio
from lexica import AsyncClient
from lexica.constants import languageModels

async def main(prompt: str) -> dict:
async def async_main(prompt: str) -> dict:
client = AsyncClient()
response = await client.gpt(prompt)
response = await client.ChatCompletion(prompt,languageModels.gpt)
return response

if __name__ == "__main__":
print(asyncio.run(main("hello, who are you?")))
print(asyncio.run(async_main("hello, who are you?")))
11 changes: 11 additions & 0 deletions examples/async_llama.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
import asyncio
from lexica import AsyncClient
from lexica.constants import languageModels

async def async_main(prompt: str) -> dict:
    """Send *prompt* to the LLAMA model and return the raw API response.

    Args:
        prompt (str): Input text for the query.

    Returns:
        dict: Response from the lexica API.
    """
    client = AsyncClient()
    response = await client.ChatCompletion(prompt, languageModels.llama)
    # Close the underlying HTTP session so the event loop shuts down
    # cleanly (matches the async_palm example, which also closes it).
    await client.close()
    return response

if __name__ == "__main__":
    print(asyncio.run(async_main("hello, who are you?")))
11 changes: 11 additions & 0 deletions examples/async_mistral.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
import asyncio
from lexica import AsyncClient
from lexica.constants import languageModels

async def async_main(prompt: str) -> dict:
    """Send *prompt* to the Mistral model and return the raw API response.

    Args:
        prompt (str): Input text for the query.

    Returns:
        dict: Response from the lexica API.
    """
    client = AsyncClient()
    response = await client.ChatCompletion(prompt, languageModels.mistral)
    # Close the underlying HTTP session so the event loop shuts down
    # cleanly (matches the async_palm example, which also closes it).
    await client.close()
    return response

if __name__ == "__main__":
    print(asyncio.run(async_main("hello, who are you?")))
5 changes: 3 additions & 2 deletions examples/async_palm.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,12 @@
from lexica import AsyncClient
import asyncio
from lexica.constants import languageModels

async def async_main(prompt: str) -> dict:
client = AsyncClient()
response = await client.palm(prompt)
response = await client.ChatCompletion(prompt,languageModels.palm)
await client.close()
return response

if __name__ == "__main__":
print(asyncio.run(async_main("hello world")))
print(asyncio.run(async_main("hello, who are you?")))
3 changes: 2 additions & 1 deletion examples/bard.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,9 @@
from lexica import Client
from lexica.constants import languageModels

def main(prompt: str) -> dict:
client = Client()
response = client.bard(prompt)
response = client.ChatCompletion(prompt,languageModels.bard)
return response

if __name__ == "__main__":
Expand Down
3 changes: 2 additions & 1 deletion examples/gpt.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,9 @@
from lexica import Client
from lexica.constants import languageModels

def main(prompt: str) -> dict:
client = Client()
response = client.gpt(prompt)
response = client.ChatCompletion(prompt,languageModels.gpt)
return response

if __name__ == "__main__":
Expand Down
10 changes: 10 additions & 0 deletions examples/llama.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
from lexica import Client
from lexica.constants import languageModels


def main(prompt: str) -> dict:
    """Query the LLAMA model with *prompt* and return the API response dict."""
    api = Client()
    return api.ChatCompletion(prompt, languageModels.llama)


if __name__ == "__main__":
    print(main("hello, who are you?"))
10 changes: 10 additions & 0 deletions examples/mistral.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
from lexica import Client
from lexica.constants import languageModels


def main(prompt: str) -> dict:
    """Query the Mistral model with *prompt* and return the API response dict."""
    api = Client()
    return api.ChatCompletion(prompt, languageModels.mistral)


if __name__ == "__main__":
    print(main("hello, who are you?"))
3 changes: 2 additions & 1 deletion examples/palm.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,9 @@
from lexica import Client
from lexica.constants import languageModels

def main(prompt: str) -> dict:
client = Client()
response = client.palm(prompt)
response = client.ChatCompletion(prompt,languageModels.palm)
return response

if __name__ == "__main__":
Expand Down
8 changes: 8 additions & 0 deletions lexica/constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,3 +3,11 @@
SESSION_HEADERS = {
"Host": "lexica.qewertyy.me",
}

class languageModels(object):
    # Registry of LLMs exposed by the lexica API. Each attribute is the
    # dict that ChatCompletion expects as its ``model`` argument:
    # "modelId" is sent to the API as the ``model_id`` parameter and
    # "name" is a human-readable label.
    bard = {"modelId":20,"name":"Bard"}
    palm = {"modelId":0,"name":"PaLM"}
    palm2 = {"modelId":1,"name":"PaLM 2"}
    # NOTE(review): ``mistral`` is labelled "LLAMA 2" and ``llama`` is
    # labelled "LLAMA" — these names look mislabelled or swapped; confirm
    # the model IDs/labels against the API before relying on them.
    mistral = {"modelId":21,"name":"LLAMA 2"}
    llama = {"modelId":18,"name":"LLAMA"}
    gpt = {"modelId":5,"name":"ChatGPT"}
74 changes: 6 additions & 68 deletions lexica/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,16 +49,17 @@ def getModels(self) -> dict:
resp = self._request(url=f'{self.url}/models')
return resp

def palm(self, prompt: str) -> dict:
def ChatCompletion(self, prompt: str,model : dict = languageModels.palm ) -> dict:
"""
Get an answer from PaLM 2 for the given prompt
Get an answer from LLMs for the given prompt
Example:
>>> client = Client()
>>> response = client.palm("Hello, Who are you?")
>>> response = client.ChatCompletion("Hello, Who are you?", languageModels.palm)
>>> print(response)
Args:
prompt (str): Input text for the query.
model (dict): Model dict of the LLM defaults to palm.
Returns:
dict: Answer from the API in the following format:
Expand All @@ -69,40 +70,8 @@ def palm(self, prompt: str) -> dict:
}
"""
params = {
"model_id": 0,
"prompt": prompt
}
resp = self._request(
url=f'{self.url}/models',
method='POST',
params=params,
headers = {"content-type": "application/json"}
)
return resp

def gpt(self, prompt: str,context: str=False) -> dict:
"""
Get an answer from GPT-3.5-Turbo for the given prompt
Example:
>>> client = Client()
>>> response = client.gpt("Hello, Who are you?")
>>> print(response)
Args:
prompt (str): Input text for the query.
Returns:
dict: Answer from the API in the following format:
{
"message": str,
"content": str,
"code": int
}
"""
params = {
"model_id": 5,
"prompt": prompt
,"context": context if context else ''
"prompt": prompt,
"model_id": model.get('modelId',0),
}
resp = self._request(
url=f'{self.url}/models',
Expand Down Expand Up @@ -198,35 +167,4 @@ def getImages(self,task_id:str,request_id:str) -> dict:
json=payload,
headers={"content-type": "application/json"}
)
return resp

def bard(self, prompt: str,context: str=False) -> dict:
"""
Get an answer from Bard AI by google for the given prompt
Example:
>>> client = Client()
>>> response = client.bard("Hello, Who are you?")
>>> print(response)
Args:
prompt (str): Input text for the query.
Returns:
dict: Answer from the API in the following format:
{
"message": str,
"content": str,
"code": int
}
"""
params = {
"model_id": 20,
"prompt": prompt
}
resp = self._request(
url=f'{self.url}/models',
method='POST',
params=params,
headers={"content-type": "application/json"}
)
return resp
72 changes: 5 additions & 67 deletions lexica/core_async.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,48 +58,18 @@ async def __aenter__(self):
async def close(self) -> None:
"""Close async session"""
return await self.session.aclose()

async def palm(self, prompt: str,model_id:int=0) -> dict:
"""
Get an answer from PaLM 2 for the given prompt
Example:
>>> client = Client()
>>> response = await client.palm("Hello, Who are you?")
>>> print(response)
Args:
prompt (str): Input text for the query.

Returns:
dict: Answer from the API in the following format:
{
"message": str,
"content": str,
"code": int
}
async def ChatCompletion(self, prompt: str,model : dict = languageModels.palm2) -> dict:
"""
params = {
"model_id": model_id,
"prompt": prompt
}
resp = await self._request(
url=f'{self.url}/models',
method='POST',
params=params,
headers = {"content-type": "application/json"}
)
return resp

async def gpt(self, prompt: str,context: str = False) -> dict:
"""
Get an answer from GPT-3.5-Turbo for the given prompt
Get an answer from LLMs for the given prompt
Example:
>>> client = Client()
>>> response = await client.gpt("Hello, Who are you?")
>>> response = await client.ChatCompletion("Hello, Who are you?")
>>> print(response)
Args:
prompt (str): Input text for the query.
model (dict): Model dict of the LLM, defaults to palm2.
Returns:
dict: Answer from the API in the following format:
Expand All @@ -110,40 +80,8 @@ async def gpt(self, prompt: str,context: str = False) -> dict:
}
"""
params = {
"model_id": 5,
"model_id": model.get('modelId',0),
"prompt": prompt,
"context": context if context else ''
}
resp = await self._request(
url=f'{self.url}/models',
method='POST',
params=params,
headers = {"content-type": "application/json"}
)
return resp

async def bard(self, prompt: str,context: str = False) -> dict:
"""
Get an answer from Bard AI by google for the given prompt
Example:
>>> client = Client()
>>> response = await client.bard("Hello, Who are you?")
>>> print(response)
Args:
prompt (str): Input text for the query.
Returns:
dict: Answer from the API in the following format:
{
"message": str,
"content": str,
"code": int
}
"""
params = {
"model_id": 20,
"prompt": prompt
}
resp = await self._request(
url=f'{self.url}/models',
Expand Down
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ def get_long_description():

setup(
name="lexica-api",
version="1.4.0",
version="1.4.3",
author="Qewertyy",
author_email="[email protected]",
description="The python package for api.qewertyy.me",
Expand Down

0 comments on commit 73fb9d9

Please sign in to comment.