
Commit

bump version
Qewertyy committed Apr 30, 2024
1 parent 91c5c3b commit 7aeee3f
Showing 5 changed files with 3 additions and 27 deletions.
13 changes: 0 additions & 13 deletions examples/async_palm.py

This file was deleted.

11 changes: 0 additions & 11 deletions examples/palm.py

This file was deleted.

2 changes: 1 addition & 1 deletion lexica/__init__.py
@@ -10,5 +10,5 @@
"languageModels"
]

__version__ = "1.5.8"
__version__ = "1.5.9"
__author__ = "Qewertyy <[email protected]>"
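
For reference, the bumped version is set in lexica/__init__.py and is therefore exposed at the package top level, so it can be checked at runtime:

```python
import lexica

# lexica/__init__.py now reports the bumped version.
assert lexica.__version__ == "1.5.9"
print(lexica.__version__)
```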
2 changes: 1 addition & 1 deletion lexica/core.py
@@ -52,7 +52,7 @@ def getModels(self) -> dict:
resp = self._request(url=f'{self.url}/models')
return resp

def ChatCompletion(self : "Client", prompt: str,model : dict = languageModels.palm ,*args, **kwargs) -> dict:
def ChatCompletion(self : "Client", prompt: str,model : dict = languageModels.gemini ,*args, **kwargs) -> dict:
"""
Get an answer from LLMs' for the given prompt
Example:
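
A minimal usage sketch of the synchronous client after this change, assuming Client and languageModels are importable from the package top level and that Client() takes no required constructor arguments (neither is shown in this diff):

```python
# Hypothetical sketch, not taken from the repository's examples.
from lexica import Client, languageModels  # assumed top-level re-exports

client = Client()

# ChatCompletion now defaults to languageModels.gemini, so the model
# argument can be omitted; it can still be passed explicitly.
response = client.ChatCompletion("Write a haiku about version bumps")
print(response)
```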
2 changes: 1 addition & 1 deletion lexica/core_async.py
@@ -62,7 +62,7 @@ async def close(self) -> None:
"""Close async session"""
return await self.session.aclose()

async def ChatCompletion(self : "AsyncClient", prompt: str,model : dict = languageModels.palm2,*args, **kwargs) -> dict:
async def ChatCompletion(self : "AsyncClient", prompt: str,model : dict = languageModels.gemini,*args, **kwargs) -> dict:
"""
Get an answer from LLMs' for the given prompt
Example:
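
Likewise, a sketch of the async client after the same default swap (same assumptions about top-level imports and constructor arguments; close() comes from the diff context above):

```python
# Hypothetical sketch, not taken from the repository's examples.
import asyncio

from lexica import AsyncClient, languageModels  # assumed top-level re-exports


async def main() -> None:
    client = AsyncClient()
    # ChatCompletion now defaults to languageModels.gemini.
    response = await client.ChatCompletion("Write a haiku about version bumps")
    print(response)
    # close() is shown in the diff and closes the underlying async session.
    await client.close()


asyncio.run(main())
```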
