Merge branch 'dev' into feat/vectorstore-pgvector
ogabrielluiz committed Nov 10, 2023
2 parents 3e6e6b4 + fd4918f commit 413eb54
Showing 17 changed files with 434 additions and 359 deletions.
2 changes: 1 addition & 1 deletion Dockerfile
@@ -11,5 +11,5 @@ WORKDIR $HOME/app

COPY --chown=user . $HOME/app

RUN pip install langflow>==0.0.86 -U --user
RUN pip install langflow>==0.5.0 -U --user
CMD ["python", "-m", "langflow", "run", "--host", "0.0.0.0", "--port", "7860"]
4 changes: 2 additions & 2 deletions Makefile
@@ -74,10 +74,10 @@ backend:
make install_backend
ifeq ($(login),1)
@echo "Running backend without autologin";
poetry run langflow run --backend-only --port 7860 --host 0.0.0.0 --no-open-browser
poetry run langflow run --backend-only --port 7860 --host 0.0.0.0 --no-open-browser --log-level debug --workers 3
else
@echo "Running backend with autologin";
LANGFLOW_AUTO_LOGIN=True poetry run langflow run --backend-only --port 7860 --host 0.0.0.0 --no-open-browser
LANGFLOW_AUTO_LOGIN=True poetry run langflow run --backend-only --port 7860 --host 0.0.0.0 --no-open-browser --log-level debug --workers 3
endif

build_and_run:
9 changes: 5 additions & 4 deletions docker_example/Dockerfile
@@ -1,14 +1,15 @@
FROM python:3.10-slim

RUN apt-get update && apt-get install gcc g++ git make -y
RUN apt-get update && apt-get install gcc g++ git make -y && apt-get clean \
&& rm -rf /var/lib/apt/lists/*
RUN useradd -m -u 1000 user
USER user
ENV HOME=/home/user \
PATH=/home/user/.local/bin:$PATH
PATH=/home/user/.local/bin:$PATH

WORKDIR $HOME/app

COPY --chown=user . $HOME/app

RUN pip install langflow>==0.0.71 -U --user
CMD ["langflow", "--host", "0.0.0.0", "--port", "7860"]
RUN pip install langflow>==0.5.0 -U --user
CMD ["python", "-m", "langflow", "run", "--host", "0.0.0.0", "--port", "7860"]
2 changes: 1 addition & 1 deletion docker_example/docker-compose.yml
@@ -7,4 +7,4 @@ services:
dockerfile: Dockerfile
ports:
- "7860:7860"
command: langflow --host 0.0.0.0
command: langflow run --host 0.0.0.0
2 changes: 1 addition & 1 deletion docs/docs/guidelines/components.mdx
@@ -5,7 +5,7 @@ import ReactPlayer from "react-player";

# Component

Components are the building blocks of the flows. They are made of inputs, outputs, and parameters that define their functionality, providing a convenient and straightforward way to compose LLM-based applications. Learn more about components and how they work in the LangChain [documentation](https://docs.langchain.com/docs/category/components) section.
Components are the building blocks of the flows. They are made of inputs, outputs, and parameters that define their functionality, providing a convenient and straightforward way to compose LLM-based applications. Learn more about components and how they work in the LangChain [documentation](https://python.langchain.com/docs/integrations/components) section.

### Component's Features

8 changes: 5 additions & 3 deletions docs/docusaurus.config.js
@@ -31,7 +31,7 @@ module.exports = {
[
remarkCodeHike,
{
theme: "github-light",
theme: "github-dark",
showCopyButton: true,
lineNumbers: true,
},
@@ -112,8 +112,10 @@ module.exports = {
},
colorMode: {
defaultMode: "light",
disableSwitch: true,
respectPrefersColorScheme: false,
/* Allow users to choose light or dark mode. */
disableSwitch: false,
/* Respect user preferences, such as low light mode in the evening */
respectPrefersColorScheme: true,
},
announcementBar: {
content:
616 changes: 301 additions & 315 deletions poetry.lock

Large diffs are not rendered by default.

13 changes: 6 additions & 7 deletions pyproject.toml
@@ -26,15 +26,15 @@ langflow = "langflow.__main__:main"

[tool.poetry.dependencies]
python = ">=3.9,<3.11"
fastapi = "^0.103.0"
fastapi = "^0.104.0"
uvicorn = "^0.23.0"
beautifulsoup4 = "^4.12.2"
google-search-results = "^2.4.1"
google-api-python-client = "^2.79.0"
typer = "^0.9.0"
gunicorn = "^21.2.0"
langchain = "^0.0.312"
openai = "^0.27.8"
openai = "^1.0.1"
pandas = "2.0.3"
chromadb = "^0.3.21"
huggingface-hub = { version = "^0.16.0", extras = ["inference"] }
@@ -48,7 +48,7 @@ pysrt = "^1.1.2"
fake-useragent = "^1.2.1"
docstring-parser = "^0.15"
psycopg2-binary = "^2.9.6"
pyarrow = "^12.0.0"
pyarrow = "^14.0.0"
tiktoken = "~0.5.0"
wikipedia = "^1.4.0"
qdrant-client = "^1.4.0"
@@ -66,12 +66,12 @@ orjson = "3.9.3"
multiprocess = "^0.70.14"
cachetools = "^5.3.1"
types-cachetools = "^5.3.0.5"
appdirs = "^1.4.4"
platformdirs = "^3.11.0"
pinecone-client = "^2.2.2"
supabase = "^1.0.3"
supabase = "^2.0.3"
pymongo = "^4.4.0"
certifi = "^2023.5.7"
google-cloud-aiplatform = "^1.26.1"
google-cloud-aiplatform = "^1.36.0"
psycopg = "^3.1.9"
psycopg-binary = "^3.1.9"
fastavro = "^1.8.0"
@@ -108,7 +108,6 @@ requests = "^2.28.0"
pytest-cov = "^4.0.0"
pandas-stubs = "^2.0.0.230412"
types-pillow = "^9.5.0.2"
types-appdirs = "^1.4.3.5"
types-pyyaml = "^6.0.12.8"
types-python-jose = "^3.3.4.8"
types-passlib = "^1.7.7.13"
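Among the constraint bumps above, the move from openai ^0.27 to ^1.0.1 is the one that changes a public API: the 1.x SDK replaced the module-level calls with a client object. A minimal illustration of the new style, independent of Langflow's own wrappers and assuming an OPENAI_API_KEY in the environment:

from openai import OpenAI  # 1.x client class; 0.x used openai.ChatCompletion.create

client = OpenAI()  # reads OPENAI_API_KEY from the environment

response = client.chat.completions.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "Say hello"}],
)
print(response.choices[0].message.content)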
7 changes: 5 additions & 2 deletions src/backend/langflow/api/v1/chat.py
@@ -12,7 +12,10 @@
from langflow.api.v1.schemas import BuildStatus, BuiltResponse, InitResponse, StreamData

from langflow.graph.graph.base import Graph
from langflow.services.auth.utils import get_current_active_user, get_current_user
from langflow.services.auth.utils import (
get_current_active_user,
get_current_user_by_jwt,
)
from langflow.services.cache.utils import update_build_status
from loguru import logger
from langflow.services.getters import get_chat_service, get_session, get_cache_service
Expand All @@ -34,8 +37,8 @@ async def chat(
):
"""Websocket endpoint for chat."""
try:
user = await get_current_user_by_jwt(token, db)
await websocket.accept()
user = await get_current_user(token, db)
if not user:
await websocket.close(
code=status.WS_1008_POLICY_VIOLATION, reason="Unauthorized"
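The chat.py hunk above resolves the user from the JWT (get_current_user_by_jwt) before websocket.accept(), then closes the socket with WS_1008_POLICY_VIOLATION when no user comes back. A self-contained sketch of that ordering; the route path and the resolve_user helper are placeholders, not Langflow's real names:

from fastapi import FastAPI, WebSocket, status

app = FastAPI()


async def resolve_user(token: str):
    """Stand-in for get_current_user_by_jwt(); returns None for a bad token."""
    return {"id": "demo"} if token == "valid-token" else None


@app.websocket("/ws/demo/{token}")
async def demo_chat(websocket: WebSocket, token: str):
    user = await resolve_user(token)  # authenticate first...
    await websocket.accept()          # ...then accept the connection
    if not user:
        # reject unauthenticated clients with the same close code as above
        await websocket.close(
            code=status.WS_1008_POLICY_VIOLATION, reason="Unauthorized"
        )
        return
    await websocket.send_text(f"hello {user['id']}")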
@@ -20,10 +20,11 @@ class ConversationalAgent(CustomComponent):

def build_config(self):
openai_function_models = [
"gpt-3.5-turbo-0613",
"gpt-3.5-turbo-16k-0613",
"gpt-4-0613",
"gpt-4-32k-0613",
"gpt-4-1106-preview",
"gpt-3.5-turbo",
"gpt-3.5-turbo-16k",
"gpt-4",
"gpt-4-32k",
]
return {
"tools": {"is_list": True, "display_name": "Tools"},
64 changes: 64 additions & 0 deletions src/backend/langflow/components/vectorstores/Redis.py
@@ -0,0 +1,64 @@
from typing import Optional
from langflow import CustomComponent

from langchain.vectorstores.redis import Redis
from langchain.schema import Document
from langchain.vectorstores.base import VectorStore
from langchain.embeddings.base import Embeddings


class RedisComponent(CustomComponent):
"""
A custom component for implementing a Vector Store using Redis.
"""

display_name: str = "Redis"
description: str = "Implementation of Vector Store using Redis"
documentation = "https://python.langchain.com/docs/integrations/vectorstores/redis"
beta = True

def build_config(self):
"""
Builds the configuration for the component.
Returns:
- dict: A dictionary containing the configuration options for the component.
"""
return {
"index_name": {"display_name": "Index Name", "value": "your_index"},
"code": {"show": False, "display_name": "Code"},
"documents": {"display_name": "Documents", "is_list": True},
"embedding": {"display_name": "Embedding"},
"redis_server_url": {
"display_name": "Redis Server Connection String",
"advanced": False,
},
"redis_index_name": {"display_name": "Redis Index", "advanced": False},
}

def build(
self,
embedding: Embeddings,
redis_server_url: str,
redis_index_name: str,
documents: Optional[Document] = None,
) -> VectorStore:
"""
Builds the Vector Store or BaseRetriever object.
Args:
- embedding (Embeddings): The embeddings to use for the Vector Store.
- documents (Optional[Document]): The documents to use for the Vector Store.
- redis_index_name (str): The name of the Redis index.
- redis_server_url (str): The URL for the Redis server.
Returns:
- VectorStore: The Vector Store object.
"""

return Redis.from_documents(
documents=documents, # type: ignore
embedding=embedding,
redis_url=redis_server_url,
index_name=redis_index_name,
)
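The build() method above is a thin wrapper over LangChain's Redis vector store. A rough standalone sketch of what it does, assuming a Redis Stack (RediSearch) instance at the given URL and an OPENAI_API_KEY for the embeddings; the URL, index name, and sample documents are placeholders:

from langchain.embeddings import OpenAIEmbeddings
from langchain.schema import Document
from langchain.vectorstores.redis import Redis

docs = [
    Document(page_content="Langflow flows are built from components."),
    Document(page_content="Redis can act as the backing vector store."),
]

# Mirrors the Redis.from_documents(...) call in build() above.
vector_store = Redis.from_documents(
    documents=docs,
    embedding=OpenAIEmbeddings(),
    redis_url="redis://localhost:6379",
    index_name="langflow_demo",
)

print(vector_store.similarity_search("what backs the vector store?", k=1))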
26 changes: 25 additions & 1 deletion src/backend/langflow/services/auth/utils.py
@@ -15,7 +15,7 @@
from langflow.services.getters import get_session, get_settings_service
from sqlmodel import Session

oauth2_login = OAuth2PasswordBearer(tokenUrl="api/v1/login")
oauth2_login = OAuth2PasswordBearer(tokenUrl="api/v1/login", auto_error=False)

API_KEY_NAME = "x-api-key"

@@ -69,6 +69,30 @@ async def api_key_security(


async def get_current_user(
token: str = Security(oauth2_login),
query_param: str = Security(api_key_query),
header_param: str = Security(api_key_header),
db: Session = Depends(get_session),
) -> User:
if token:
return await get_current_user_by_jwt(token, db)
else:
if not query_param and not header_param:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail="An API key must be passed as query or header",
)
user = await api_key_security(query_param, header_param, db)
if user:
return user

raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail="Invalid or missing API key",
)


async def get_current_user_by_jwt(
token: Annotated[str, Depends(oauth2_login)],
db: Session = Depends(get_session),
) -> User:
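With auto_error=False on the OAuth2 scheme, a missing bearer token no longer raises immediately, so the new get_current_user can fall back to the x-api-key query or header credential. From a client's point of view either credential works; a rough sketch, where the /api/v1/flows path, the demo credentials, and the access_token field name are assumptions for illustration:

import requests

BASE = "http://localhost:7860"

# Option 1: a JWT obtained from the login endpoint (tokenUrl="api/v1/login" above).
login = requests.post(
    f"{BASE}/api/v1/login",
    data={"username": "langflow", "password": "langflow"},  # demo credentials
)
token = login.json()["access_token"]  # field name assumed from the OAuth2 password flow
flows = requests.get(f"{BASE}/api/v1/flows", headers={"Authorization": f"Bearer {token}"})

# Option 2: an API key, passed as the x-api-key header or as a query parameter.
flows_hdr = requests.get(f"{BASE}/api/v1/flows", headers={"x-api-key": "your-api-key"})
flows_qry = requests.get(f"{BASE}/api/v1/flows", params={"x-api-key": "your-api-key"})

# With neither credential supplied, the dependency raises 403
# ("An API key must be passed as query or header").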
2 changes: 1 addition & 1 deletion src/backend/langflow/services/cache/utils.py
@@ -7,7 +7,7 @@
from collections import OrderedDict
from pathlib import Path
from typing import TYPE_CHECKING, Any, Dict
from appdirs import user_cache_dir
from platformdirs import user_cache_dir
from fastapi import UploadFile
from langflow.api.v1.schemas import BuildStatus
from langflow.services.database.models.base import orjson_dumps
4 changes: 2 additions & 2 deletions src/backend/langflow/services/settings/base.py
@@ -55,14 +55,14 @@ class Settings(BaseSettings):
@validator("CONFIG_DIR", pre=True, allow_reuse=True)
def set_langflow_dir(cls, value):
if not value:
import appdirs
from platformdirs import user_cache_dir

# Define the app name and author
app_name = "langflow"
app_author = "logspace"

# Get the cache directory for the application
cache_dir = appdirs.user_cache_dir(app_name, app_author)
cache_dir = user_cache_dir(app_name, app_author)

# Create a .langflow directory inside the cache directory
value = Path(cache_dir)
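appdirs is unmaintained; platformdirs is its maintained fork and keeps the same call signature, which is why the swaps in cache/utils.py, settings/base.py, and logger.py are mechanical. A quick check of what the new call resolves to, using the same arguments as the settings code above:

from pathlib import Path

from platformdirs import user_cache_dir

# On Linux this resolves to ~/.cache/langflow, on macOS to ~/Library/Caches/langflow.
cache_dir = Path(user_cache_dir("langflow", "logspace"))
print(cache_dir)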
10 changes: 4 additions & 6 deletions src/backend/langflow/utils/constants.py
@@ -6,16 +6,14 @@
"text-ada-001",
]
CHAT_OPENAI_MODELS = [
"gpt-3.5-turbo-0613",
"gpt-3.5-turbo",
"gpt-3.5-turbo-16k-0613",
"gpt-3.5-turbo-16k",
"gpt-4-0613",
"gpt-4-32k-0613",
"gpt-4-1106-preview",
"gpt-4",
"gpt-4-32k",
"gpt-3.5-turbo",
"gpt-3.5-turbo-16k",
]


ANTHROPIC_MODELS = [
# largest model, ideal for a wide range of more complex tasks.
"claude-v1",
4 changes: 2 additions & 2 deletions src/backend/langflow/utils/logger.py
@@ -2,9 +2,9 @@
from loguru import logger
from pathlib import Path
from rich.logging import RichHandler
from platformdirs import user_cache_dir
import os
import orjson
import appdirs


VALID_LOG_LEVELS = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]
@@ -50,7 +50,7 @@ def configure(log_level: Optional[str] = None, log_file: Optional[Path] = None):
)

if not log_file:
cache_dir = Path(appdirs.user_cache_dir("langflow"))
cache_dir = Path(user_cache_dir("langflow"))
log_file = cache_dir / "langflow.log"

log_file = Path(log_file)
11 changes: 4 additions & 7 deletions tests/test_llms_template.py
@@ -309,17 +309,14 @@ def test_chat_open_ai(client: TestClient, logged_in_headers):
"placeholder": "",
"show": True,
"multiline": False,
"value": "gpt-3.5-turbo-0613",
"value": "gpt-4-1106-preview",
"password": False,
"options": [
"gpt-3.5-turbo-0613",
"gpt-3.5-turbo",
"gpt-3.5-turbo-16k-0613",
"gpt-3.5-turbo-16k",
"gpt-4-0613",
"gpt-4-32k-0613",
"gpt-4-1106-preview",
"gpt-4",
"gpt-4-32k",
"gpt-3.5-turbo",
"gpt-3.5-turbo-16k",
],
"name": "model_name",
"type": "str",
