
Commit

merging branch release into dev
Cristhianzl committed Jun 30, 2023
1 parent 683c693 commit 168a1b9
Showing 75 changed files with 2,958 additions and 850 deletions.
831 changes: 608 additions & 223 deletions poetry.lock

Large diffs are not rendered by default.

29 changes: 15 additions & 14 deletions pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "langflow"
version = "0.2.2"
version = "0.2.7"
description = "A Python package with a built-in web application"
authors = ["Logspace <[email protected]>"]
maintainers = [
@@ -25,22 +25,21 @@ langflow = "langflow.__main__:main"
python = ">=3.9,<3.12"
fastapi = "^0.98.0"
uvicorn = "^0.22.0"
beautifulsoup4 = "^4.11.2"
beautifulsoup4 = "^4.12.2"
google-search-results = "^2.4.1"
google-api-python-client = "^2.79.0"
typer = "^0.9.0"
gunicorn = "^20.1.0"
langchain = "^0.0.215"
langchain = "^0.0.219"
openai = "^0.27.8"
-types-pyyaml = "^6.0.12.8"
-pandas = "^1.5.3"
+pandas = "^2.0.0"
chromadb = "^0.3.21"
huggingface-hub = "^0.13.3"
huggingface-hub = "^0.15.0"
rich = "^13.4.2"
llama-cpp-python = "~0.1.0"
networkx = "^3.1"
unstructured = "^0.5.11"
pypdf = "^3.7.1"
unstructured = "^0.7.0"
pypdf = "^3.11.0"
lxml = "^4.9.2"
pysrt = "^1.1.2"
fake-useragent = "^1.1.3"
@@ -49,18 +48,18 @@ psycopg2-binary = "^2.9.6"
pyarrow = "^12.0.0"
tiktoken = "~0.4.0"
wikipedia = "^1.4.0"
langchain-serve = { version = ">0.0.39", optional = true }
qdrant-client = "^1.2.0"
langchain-serve = { version = ">0.0.47", optional = true }
qdrant-client = "^1.3.0"
websockets = "^10.3"
weaviate-client = "^3.21.0"
jina = "3.15.2"
sentence-transformers = "^2.2.2"
ctransformers = "^0.2.2"
cohere = "^4.6.0"
ctransformers = "^0.2.10"
cohere = "^4.11.0"
python-multipart = "^0.0.6"
sqlmodel = "^0.0.8"
faiss-cpu = "^1.7.4"
anthropic = "^0.2.10"
anthropic = "^0.3.0"
orjson = "^3.9.1"
multiprocess = "^0.70.14"
cachetools = "^5.3.1"
@@ -70,7 +69,8 @@ pinecone-client = "^2.2.2"
supabase = "^1.0.3"
pymongo = "^4.4.0"
certifi = "^2023.5.7"

psycopg = "^3.1.9"
psycopg-binary = "^3.1.9"

[tool.poetry.dev-dependencies]
black = "^23.1.0"
@@ -85,6 +85,7 @@ pytest-cov = "^4.0.0"
pandas-stubs = "^2.0.0.230412"
types-pillow = "^9.5.0.2"
types-appdirs = "^1.4.3.5"
+types-pyyaml = "^6.0.12.8"


[tool.poetry.extras]
40 changes: 4 additions & 36 deletions src/backend/langflow/__main__.py
@@ -1,6 +1,5 @@
import sys
import time
-from fastapi import FastAPI
import httpx
from multiprocess import Process, cpu_count # type: ignore
import platform
@@ -11,9 +10,7 @@
from rich import box
from rich import print as rprint
import typer
-from fastapi.staticfiles import StaticFiles
-from fastapi.responses import FileResponse
-from langflow.main import create_app
+from langflow.main import setup_app
from langflow.settings import settings
from langflow.utils.logger import configure, logger
import webbrowser
@@ -144,15 +141,9 @@ def serve(
remove_api_keys=remove_api_keys,
cache=cache,
)
-# get the directory of the current file
-if not path:
-frontend_path = Path(__file__).parent
-static_files_dir = frontend_path / "frontend"
-else:
-static_files_dir = Path(path)
-
-app = create_app()
-setup_static_files(app, static_files_dir)
+# create path object if path is provided
+static_files_dir: Optional[Path] = Path(path) if path else None
+app = setup_app(static_files_dir=static_files_dir)
# check if port is being used
if is_port_in_use(port, host):
port = get_free_port(port)
@@ -200,29 +191,6 @@ def run_on_windows(host, port, log_level, options, app):
run_langflow(host, port, log_level, options, app)


-def setup_static_files(app: FastAPI, static_files_dir: Path):
-"""
-Setup the static files directory.
-Args:
-app (FastAPI): FastAPI app.
-path (str): Path to the static files directory.
-"""
-app.mount(
-"/",
-StaticFiles(directory=static_files_dir, html=True),
-name="static",
-)
-
-@app.exception_handler(404)
-async def custom_404_handler(request, __):
-path = static_files_dir / "index.html"
-
-if not path.exists():
-raise RuntimeError(f"File at path {path} does not exist.")
-return FileResponse(path)


def is_port_in_use(port, host="localhost"):
"""
Check if a port is in use.
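
Note: the serve() command no longer wires up static files itself; the removed create_app() plus setup_static_files() pair is replaced by a single setup_app(static_files_dir=...) call imported from langflow.main. That helper is not part of this diff, so the following is only a plausible sketch, assuming it simply absorbs the mounting and 404-fallback logic deleted above (FastAPI() stands in for whatever app factory langflow.main really uses):

from pathlib import Path
from typing import Optional

from fastapi import FastAPI
from fastapi.responses import FileResponse
from fastapi.staticfiles import StaticFiles


def setup_app(static_files_dir: Optional[Path] = None) -> FastAPI:
    """Sketch: build the app and mount the frontend, as __main__.py used to do."""
    app = FastAPI()  # stand-in for the real app factory in langflow.main
    if static_files_dir is None:
        # fall back to the frontend bundled next to the package, the old default
        static_files_dir = Path(__file__).parent / "frontend"

    app.mount(
        "/",
        StaticFiles(directory=static_files_dir, html=True),
        name="static",
    )

    @app.exception_handler(404)
    async def custom_404_handler(request, __):
        index = static_files_dir / "index.html"
        if not index.exists():
            raise RuntimeError(f"File at path {index} does not exist.")
        return FileResponse(index)

    return app
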
48 changes: 33 additions & 15 deletions src/backend/langflow/api/v1/chat.py
@@ -1,12 +1,6 @@
-from fastapi import (
-APIRouter,
-HTTPException,
-WebSocket,
-WebSocketException,
-status,
-)
+from fastapi import APIRouter, HTTPException, WebSocket, WebSocketException, status
from fastapi.responses import StreamingResponse
-from langflow.api.v1.schemas import BuiltResponse, InitResponse, StreamData
+from langflow.api.v1.schemas import BuildStatus, BuiltResponse, InitResponse, StreamData

from langflow.chat.manager import ChatManager
from langflow.graph.graph.base import Graph
@@ -32,15 +26,29 @@ async def chat(client_id: str, websocket: WebSocket):
await websocket.close(code=status.WS_1011_INTERNAL_ERROR, reason=str(exc))


@router.post("/build/init", response_model=InitResponse, status_code=201)
async def init_build(graph_data: dict):
@router.post("/build/init/{flow_id}", response_model=InitResponse, status_code=201)
async def init_build(graph_data: dict, flow_id: str):
"""Initialize the build by storing graph data and returning a unique session ID."""

try:
-flow_id = graph_data.get("id")
-if flow_id is None:
-raise ValueError("No ID provided")
-flow_data_store[flow_id] = graph_data
+# Check if already building
+if (
+flow_id in flow_data_store
+and flow_data_store[flow_id]["status"] == BuildStatus.IN_PROGRESS
+):
+return InitResponse(flowId=flow_id)
+
+# Delete from cache if already exists
+if flow_id in chat_manager.in_memory_cache:
+with chat_manager.in_memory_cache._lock:
+chat_manager.in_memory_cache.delete(flow_id)
+logger.debug(f"Deleted flow {flow_id} from cache")
+flow_data_store[flow_id] = {
+"graph_data": graph_data,
+"status": BuildStatus.STARTED,
+}

return InitResponse(flowId=flow_id)
except Exception as exc:
@@ -52,8 +60,9 @@ async def init_build(graph_data: dict):
async def build_status(flow_id: str):
"""Check the flow_id is in the flow_data_store."""
try:
-built = flow_id in flow_data_store and not isinstance(
-flow_data_store[flow_id], dict
+built = (
+flow_id in flow_data_store
+and flow_data_store[flow_id]["status"] == BuildStatus.SUCCESS
)

return BuiltResponse(
@@ -77,7 +86,12 @@ async def event_stream(flow_id):
yield str(StreamData(event="error", data={"error": error_message}))
return

graph_data = flow_data_store[flow_id].get("data")
if flow_data_store[flow_id].get("status") == BuildStatus.IN_PROGRESS:
error_message = "Already building"
yield str(StreamData(event="error", data={"error": error_message}))
return

graph_data = flow_data_store[flow_id].get("graph_data")

if not graph_data:
error_message = "No data provided"
@@ -95,6 +109,7 @@ async def event_stream(flow_id):
return

number_of_nodes = len(graph.nodes)
flow_data_store[flow_id]["status"] = BuildStatus.IN_PROGRESS
for i, vertex in enumerate(graph.generator_build(), 1):
try:
log_dict = {
@@ -110,6 +125,7 @@
except Exception as exc:
params = str(exc)
valid = False
flow_data_store[flow_id]["status"] = BuildStatus.FAILURE

response = {
"valid": valid,
@@ -121,8 +137,10 @@
yield str(StreamData(event="message", data=response))

chat_manager.set_cache(flow_id, graph.build())
flow_data_store[flow_id]["status"] = BuildStatus.SUCCESS
except Exception as exc:
logger.error("Error while building the flow: %s", exc)
flow_data_store[flow_id]["status"] = BuildStatus.FAILURE
yield str(StreamData(event="error", data={"error": str(exc)}))
finally:
yield str(StreamData(event="message", data=final_response))
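
Note: the flow ID now travels in the URL instead of inside the request body, and flow_data_store tracks a per-flow BuildStatus that moves from STARTED to IN_PROGRESS and finally SUCCESS or FAILURE as the stream runs. A minimal client sketch of the new lifecycle follows; the /api/v1 prefix, the status URL and the port are assumptions, since the router prefix and the build_status decorator are outside this diff:

import httpx

flow_id = "example-flow-id"                        # placeholder flow id
graph_data = {"data": {"nodes": [], "edges": []}}  # illustrative exported graph

with httpx.Client(base_url="http://localhost:7860") as client:
    # Register the graph. Calling this again while the build is IN_PROGRESS
    # just returns the same flowId instead of restarting the build.
    init = client.post(f"/api/v1/build/init/{flow_id}", json=graph_data)
    assert init.status_code == 201

    # build_status() only reports built=True once the event stream has
    # finished and the status has been set to BuildStatus.SUCCESS.
    status = client.get(f"/api/v1/build/{flow_id}/status")  # assumed route
    print(status.json())
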
4 changes: 2 additions & 2 deletions src/backend/langflow/api/v1/endpoints.py
@@ -11,7 +11,7 @@
UploadFileResponse,
)

-from langflow.interface.types import build_langchain_types_dict
+from langflow.interface.types import langchain_types_dict
from langflow.database.base import get_session
from sqlmodel import Session

@@ -21,7 +21,7 @@

@router.get("/all")
def get_all():
-return build_langchain_types_dict()
+return langchain_types_dict


# For backwards compatibility we will keep the old endpoint
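
Note: the /all endpoint now returns a module-level value rather than rebuilding the LangChain types dictionary on every request. The new constant is not shown in this diff; presumably langflow/interface/types.py now computes it once at import time, along the lines of:

# Presumed definition inside langflow/interface/types.py (not part of this diff):
# build the dict once when the module is imported and reuse it for every request.
langchain_types_dict = build_langchain_types_dict()
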
10 changes: 10 additions & 0 deletions src/backend/langflow/api/v1/schemas.py
@@ -1,10 +1,20 @@
+from enum import Enum
from pathlib import Path
from typing import Any, Dict, List, Optional, Union
from langflow.database.models.flow import FlowCreate, FlowRead
from pydantic import BaseModel, Field, validator
import json


+class BuildStatus(Enum):
+"""Status of the build."""
+
+SUCCESS = "success"
+FAILURE = "failure"
+STARTED = "started"
+IN_PROGRESS = "in_progress"
+
+
class GraphData(BaseModel):
"""Data inside the exported flow."""

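
Note: BuildStatus subclasses Enum rather than str, so a raw string never compares equal to a member; that is why chat.py compares against BuildStatus.SUCCESS and friends directly. A small self-contained illustration:

from enum import Enum


class BuildStatus(Enum):
    SUCCESS = "success"
    FAILURE = "failure"
    STARTED = "started"
    IN_PROGRESS = "in_progress"


print(BuildStatus.SUCCESS == "success")               # False: plain Enum, not a str mixin
print(BuildStatus.SUCCESS.value == "success")         # True: compare via .value
print(BuildStatus("started") is BuildStatus.STARTED)  # True: lookup by value
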
2 changes: 2 additions & 0 deletions src/backend/langflow/chat/config.py
@@ -0,0 +1,2 @@
+class ChatConfig:
+streaming: bool = True
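
Note: ChatConfig is a new two-line module holding a single class-level flag. A sketch of how a caller might consume it, assuming call sites simply read the class attribute (none are shown in this diff):

from langflow.chat.config import ChatConfig


def should_stream() -> bool:
    # Read the shared class attribute; setting ChatConfig.streaming = False
    # anywhere would disable streaming for every consumer of this flag.
    return ChatConfig.streaming
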
