diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 12f72f5f8b..2a258949e9 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -14,9 +14,7 @@ env: jobs: if_release: - if: | - ${{ github.event.pull_request.merged == true }} - && ${{ contains(github.event.pull_request.labels.*.name, 'Release') }} + if: ${{ (github.event.pull_request.merged == true) && contains(github.event.pull_request.labels.*.name, 'Release') }} runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 diff --git a/.vscode/launch.json b/.vscode/launch.json index e09e76cc87..bb61b0b9e3 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -1,4 +1,5 @@ { + "version": "0.2.0", "configurations": [ { "name": "Debug Backend", @@ -38,6 +39,15 @@ "request": "launch", "url": "http://localhost:3000/", "webRoot": "${workspaceRoot}/src/frontend" + }, + { + "name": "Python: Debug Tests", + "type": "python", + "request": "launch", + "program": "${file}", + "purpose": ["debug-test"], + "console": "integratedTerminal", + "justMyCode": false } ] } diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index da7ec1977f..c58bb92f18 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -7,6 +7,11 @@ to contributions, whether it be in the form of a new feature, improved infra, or To contribute to this project, please follow a ["fork and pull request"](https://docs.github.com/en/get-started/quickstart/contributing-to-projects) workflow. Please do not try to push directly to this repo unless you are a maintainer. +The branch structure is as follows: + +- `main`: The stable version of Langflow +- `dev`: The development version of Langflow. This branch is used to test new features before they are merged into `main` and, as such, may be unstable. + ## 🗺️Contributing Guidelines ## 🚩GitHub Issues diff --git a/README.md b/README.md index dcd91e0755..9137ea714b 100644 --- a/README.md +++ b/README.md @@ -33,7 +33,7 @@ - [HuggingFace Spaces](#huggingface-spaces) - [🖥️ Command Line Interface (CLI)](#️-command-line-interface-cli) - [Usage](#usage) - - [Environment Variables](#environment-variables) + - [Environment Variables](#environment-variables) - [Deployment](#deployment) - [Deploy Langflow on Google Cloud Platform](#deploy-langflow-on-google-cloud-platform) - [Deploy Langflow on Jina AI Cloud](#deploy-langflow-on-jina-ai-cloud) @@ -112,7 +112,6 @@ Each option is detailed below: - `--cache`: Selects the type of cache to use. Options are `InMemoryCache` and `SQLiteCache`. Can be set using the `LANGFLOW_LANGCHAIN_CACHE` environment variable. The default is `SQLiteCache`. - `--jcloud/--no-jcloud`: Toggles the option to deploy on Jina AI Cloud. The default is `no-jcloud`. - `--dev/--no-dev`: Toggles the development mode. The default is `no-dev`. -- `--database-url`: Sets the database URL to connect to. If not provided, a local SQLite database will be used. Can be set using the `LANGFLOW_DATABASE_URL` environment variable. - `--path`: Specifies the path to the frontend directory containing build files. This option is for development purposes only. Can be set using the `LANGFLOW_FRONTEND_PATH` environment variable. - `--open-browser/--no-open-browser`: Toggles the option to open the browser after starting the server. Can be set using the `LANGFLOW_OPEN_BROWSER` environment variable. The default is `open-browser`. - `--remove-api-keys/--no-remove-api-keys`: Toggles the option to remove API keys from the projects saved in the database. 
Can be set using the `LANGFLOW_REMOVE_API_KEYS` environment variable. The default is `no-remove-api-keys`. @@ -276,6 +275,8 @@ flow("Hey, have you heard of Langflow?") We welcome contributions from developers of all levels to our open-source project on GitHub. If you'd like to contribute, please check our [contributing guidelines](./CONTRIBUTING.md) and help make Langflow more accessible. +--- + Join our [Discord](https://discord.com/invite/EqksyE2EX9) server to ask questions, make suggestions and showcase your projects! 🦾
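The Dockerfile and Compose changes below move uvicorn from the plain import string `langflow.main:app` to `--factory langflow.main:create_app`. With `--factory`, uvicorn treats the import string as a zero-argument callable and serves whatever it returns, so the app is constructed at startup rather than at module import time. A minimal sketch of a compatible factory, assuming FastAPI (illustrative only; Langflow's actual `create_app` in `langflow/main.py` may differ):

```python
from fastapi import FastAPI


def create_app() -> FastAPI:
    """Zero-argument factory; `uvicorn --factory module:create_app` calls this at startup."""
    app = FastAPI(title="langflow-dev")
    # Routers, middleware, and startup/shutdown handlers would be registered here.
    return app


if __name__ == "__main__":
    import uvicorn

    # Equivalent to: uvicorn --factory __main__:create_app --host 0.0.0.0 --port 7860
    uvicorn.run(create_app, factory=True, host="0.0.0.0", port=7860)
```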

diff --git a/dev.Dockerfile b/dev.Dockerfile index b38929db22..3fcc0803d8 100644 --- a/dev.Dockerfile +++ b/dev.Dockerfile @@ -15,4 +15,4 @@ COPY ./ ./ # Install dependencies RUN poetry config virtualenvs.create false && poetry install --no-interaction --no-ansi -CMD ["uvicorn", "langflow.main:app", "--host", "0.0.0.0", "--port", "5003", "--reload", "log-level", "debug"] \ No newline at end of file +CMD ["uvicorn", "--factory", "langflow.main:create_app", "--host", "0.0.0.0", "--port", "5003", "--reload", "--log-level", "debug"] \ No newline at end of file diff --git a/docker-compose.debug.yml b/docker-compose.debug.yml index 581bdc6da7..25cddd9f72 100644 --- a/docker-compose.debug.yml +++ b/docker-compose.debug.yml @@ -1,4 +1,4 @@ -version: '3.4' +version: "3.4" services: backend: @@ -7,7 +7,12 @@ services: build: context: ./ dockerfile: ./dev.Dockerfile - command: ["sh", "-c", "pip install debugpy -t /tmp && python /tmp/debugpy --wait-for-client --listen 0.0.0.0:5678 -m uvicorn langflow.main:app --host 0.0.0.0 --port 7860 --reload"] + command: + [ + "sh", + "-c", + "pip install debugpy -t /tmp && python /tmp/debugpy --wait-for-client --listen 0.0.0.0:5678 -m uvicorn --factory langflow.main:create_app --host 0.0.0.0 --port 7860 --reload", + ] ports: - 7860:7860 - 5678:5678 @@ -22,7 +27,7 @@ services: ports: - "3000:3000" volumes: - - ./src/frontend/public:/home/node/app/public - - ./src/frontend/src:/home/node/app/src - - ./src/frontend/package.json:/home/node/app/package.json - restart: on-failure \ No newline at end of file + - ./src/frontend/public:/home/node/app/public + - ./src/frontend/src:/home/node/app/src + - ./src/frontend/package.json:/home/node/app/package.json + restart: on-failure diff --git a/docker-compose.yml b/docker-compose.yml index 755d0794d3..961cd0e33e 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,4 +1,4 @@ -version: '3' +version: "3" services: backend: @@ -9,7 +9,7 @@ services: ports: - "7860:7860" volumes: - ./:/app - command: bash -c "uvicorn langflow.main:app --host 0.0.0.0 --port 7860 --reload" + command: bash -c "uvicorn --factory langflow.main:create_app --host 0.0.0.0 --port 7860 --reload" frontend: build: @@ -22,7 +22,7 @@ services: ports: - "3000:3000" volumes: - - ./src/frontend/public:/home/node/app/public - - ./src/frontend/src:/home/node/app/src - - ./src/frontend/package.json:/home/node/app/package.json - restart: on-failure \ No newline at end of file + - ./src/frontend/public:/home/node/app/public + - ./src/frontend/src:/home/node/app/src + - ./src/frontend/package.json:/home/node/app/package.json + restart: on-failure diff --git a/docker_example/docker-compose.yml b/docker_example/docker-compose.yml index 40ceb25043..ffb0331047 100644 --- a/docker_example/docker-compose.yml +++ b/docker_example/docker-compose.yml @@ -6,5 +6,5 @@ services: context: .
dockerfile: Dockerfile ports: - - "5003:5003" + - "7860:7860" command: langflow --host 0.0.0.0 diff --git a/docs/docs/guidelines/custom-component.mdx b/docs/docs/guidelines/custom-component.mdx index bcd6372221..e649ef6e95 100644 --- a/docs/docs/guidelines/custom-component.mdx +++ b/docs/docs/guidelines/custom-component.mdx @@ -385,17 +385,17 @@ Your structure should look something like this: ### Loading Custom Components -You can specify the path to your custom components using the _`--components-path`_ argument when running the Langflow CLI, as shown below: +The recommended way to load custom components is to set the _`LANGFLOW_COMPONENTS_PATH`_ environment variable to the path of your custom components directory. Then, run the Langflow CLI as usual. ```bash -langflow --components-path /path/to/components +export LANGFLOW_COMPONENTS_PATH=/path/to/components +langflow ``` -Alternatively, you can set the `LANGFLOW_COMPONENTS_PATH` environment variable: +Alternatively, you can specify the path to your custom components using the _`--components-path`_ argument when running the Langflow CLI, as shown below: ```bash -export LANGFLOW_COMPONENTS_PATH=/path/to/components -langflow +langflow --components-path /path/to/components ``` Langflow will attempt to load all of the components found in the specified directory. If a component fails to load due to errors in the component's code, Langflow will print an error message to the console but will continue loading the rest of the components. diff --git a/docs/docs/guides/loading_document.mdx b/docs/docs/guides/loading_document.mdx index d760e91245..73fb859684 100644 --- a/docs/docs/guides/loading_document.mdx +++ b/docs/docs/guides/loading_document.mdx @@ -43,7 +43,7 @@ This guide takes you through the process of augmenting the "Basic Chat with Prom 8. Connect this loader to the `{context}` variable that we just added. -9. In the "Web Page" field, enter "https://langflow.org/how-upload-examples". +9. In the "Web Page" field, enter "https://docs.langflow.org/how-upload-examples". 10. Now, click on "ConversationBufferMemory". diff --git a/docs/static/CNAME b/docs/static/CNAME index 3e6c5c0239..ab1d0c8495 100644 --- a/docs/static/CNAME +++ b/docs/static/CNAME @@ -1 +1 @@ -langflow.org \ No newline at end of file +docs.langflow.org \ No newline at end of file diff --git a/poetry.lock b/poetry.lock index 058cb36f98..745b9be4d1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -144,15 +144,34 @@ files = [ {file = "aiostream-0.4.5.tar.gz", hash = "sha256:3ecbf87085230fbcd9605c32ca20c4fb41af02c71d076eab246ea22e35947d88"}, ] +[[package]] +name = "alembic" +version = "1.11.2" +description = "A database migration tool for SQLAlchemy." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "alembic-1.11.2-py3-none-any.whl", hash = "sha256:7981ab0c4fad4fe1be0cf183aae17689fe394ff874fd2464adb774396faf0796"}, + {file = "alembic-1.11.2.tar.gz", hash = "sha256:678f662130dc540dac12de0ea73de9f89caea9dbea138f60ef6263149bf84657"}, +] + +[package.dependencies] +Mako = "*" +SQLAlchemy = ">=1.3.0" +typing-extensions = ">=4" + +[package.extras] +tz = ["python-dateutil"] + [[package]] name = "anthropic" -version = "0.3.7" +version = "0.3.8" description = "Client library for the anthropic API" optional = false python-versions = ">=3.7,<4.0" files = [ - {file = "anthropic-0.3.7-py3-none-any.whl", hash = "sha256:ca57635d7f13d609aa8a5b93a834e067760d96b9657bdf81e0c7444ddf41fc64"}, - {file = "anthropic-0.3.7.tar.gz", hash = "sha256:0453f80ba8224364c8b0dae0b5088becd67277de57708d7b887ebb6c2ceb3c49"}, + {file = "anthropic-0.3.8-py3-none-any.whl", hash = "sha256:97ffe1bacc4214dc89b19f496cf2769746971e86f7c835a05aa21b76f260d279"}, + {file = "anthropic-0.3.8.tar.gz", hash = "sha256:6651099807456c3b95b3879f5ad7d00f7e7e4f7649a2394d18032ab8be54ef16"}, ] [package.dependencies] @@ -773,18 +792,19 @@ sqlalchemy = ["sqlalchemy (>1.3.21,<2.0)"] [[package]] name = "cohere" -version = "4.17.0" +version = "4.19.2" description = "" optional = false python-versions = ">=3.7,<4.0" files = [ - {file = "cohere-4.17.0-py3-none-any.whl", hash = "sha256:44e0bdb0a2d9467506d27b285f542177b98f92647f27e17ea921a01006fe2f33"}, - {file = "cohere-4.17.0.tar.gz", hash = "sha256:9f479543b50490b4cb6385468d7571ad891a09cde7bd6b028171596bac6ce6ff"}, + {file = "cohere-4.19.2-py3-none-any.whl", hash = "sha256:0b6a4fe04380a481a8e975ebcc9bb6433febe4d3eb583b6d6e04342a5e998345"}, + {file = "cohere-4.19.2.tar.gz", hash = "sha256:a0b0fa698b3d3983fb328bb90d68fcf08faaa2268f3772ebc6bfea6ba55acf27"}, ] [package.dependencies] aiohttp = ">=3.0,<4.0" backoff = ">=2.0,<3.0" +fastavro = {version = "1.8.2", markers = "python_version >= \"3.8\""} importlib_metadata = ">=6.0,<7.0" requests = ">=2.25.0,<3.0.0" urllib3 = ">=1.26,<3" @@ -958,13 +978,13 @@ test-randomorder = ["pytest-randomly"] [[package]] name = "ctransformers" -version = "0.2.17" +version = "0.2.21" description = "Python bindings for the Transformer models implemented in C/C++ using GGML library." 
optional = true python-versions = "*" files = [ - {file = "ctransformers-0.2.17-py3-none-any.whl", hash = "sha256:903c16b38f5b2750ee34b90107c3a72351d7a9a201a6987a6560bd50874e9698"}, - {file = "ctransformers-0.2.17.tar.gz", hash = "sha256:0c9de34cc8295ba6cb940e413130e6658fac54a99cecfa6098ac04638fd9301e"}, + {file = "ctransformers-0.2.21-py3-none-any.whl", hash = "sha256:18a0555d02f55a3935f5544b885038562f80e497a6197d8e871941a087dba546"}, + {file = "ctransformers-0.2.21.tar.gz", hash = "sha256:58e7a699050a106688b967faa59f377886e22a581fde6cd36821dfa541995677"}, ] [package.dependencies] @@ -972,6 +992,8 @@ huggingface-hub = "*" py-cpuinfo = ">=9.0.0,<10.0.0" [package.extras] +cuda = ["nvidia-cublas-cu12", "nvidia-cuda-runtime-cu12"] +gptq = ["exllama (==0.1.0)"] tests = ["pytest"] [[package]] @@ -1616,13 +1638,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-api-python-client" -version = "2.95.0" +version = "2.96.0" description = "Google API Client Library for Python" optional = false python-versions = ">=3.7" files = [ - {file = "google-api-python-client-2.95.0.tar.gz", hash = "sha256:d2731ede12f79e53fbe11fdb913dfe986440b44c0a28431c78a8ec275f4c1541"}, - {file = "google_api_python_client-2.95.0-py2.py3-none-any.whl", hash = "sha256:a8aab2da678f42a01f2f52108f787fef4310f23f9dd917c4e64664c3f0c885ba"}, + {file = "google-api-python-client-2.96.0.tar.gz", hash = "sha256:f712373d03d338af57b9f5fe98c91f4b5baaa8765469b015bc623c4681c5bd51"}, + {file = "google_api_python_client-2.96.0-py2.py3-none-any.whl", hash = "sha256:38c2b61b10d15bb41ec8f89303e3837ec2d2c3e4e38de5800c05ee322492f937"}, ] [package.dependencies] @@ -1675,13 +1697,13 @@ six = "*" [[package]] name = "google-cloud-aiplatform" -version = "1.28.1" +version = "1.29.0" description = "Vertex AI API client library" optional = false python-versions = ">=3.7" files = [ - {file = "google-cloud-aiplatform-1.28.1.tar.gz", hash = "sha256:b6468db7dc50295c988edf6505f0bf4d4bb2321de28873b2a4a87fd384be6308"}, - {file = "google_cloud_aiplatform-1.28.1-py2.py3-none-any.whl", hash = "sha256:5587b8d4599047117b0c787635a00e8e3893b75944993e2faf784176442e9de4"}, + {file = "google-cloud-aiplatform-1.29.0.tar.gz", hash = "sha256:fceabb924d2d26057e3c8c5c2e251929389aa6d553361377bc402781150c0db3"}, + {file = "google_cloud_aiplatform-1.29.0-py2.py3-none-any.whl", hash = "sha256:cf81c1d93c61ccf3df60a65e3a5a1e465e044059d36b6fc1202b940c46c4c1e1"}, ] [package.dependencies] @@ -1762,13 +1784,13 @@ grpc = ["grpcio (>=1.38.0,<2.0dev)"] [[package]] name = "google-cloud-resource-manager" -version = "1.10.2" +version = "1.10.3" description = "Google Cloud Resource Manager API client library" optional = false python-versions = ">=3.7" files = [ - {file = "google-cloud-resource-manager-1.10.2.tar.gz", hash = "sha256:9a7bdd0347ad553376cc66ad317c5223d1ae04bdcf74edcbfcd12605cff7b510"}, - {file = "google_cloud_resource_manager-1.10.2-py2.py3-none-any.whl", hash = "sha256:9e074c28326bd1632f1a270c20cfea1ffe98f49cf821033e65bdac55661ffbd5"}, + {file = "google-cloud-resource-manager-1.10.3.tar.gz", hash = "sha256:f80efcea36f10c5a81889afe93910926e3978b4b1ceeb82f563a2fc863072d14"}, + {file = "google_cloud_resource_manager-1.10.3-py2.py3-none-any.whl", hash = "sha256:1381a4b0f522248ebe0ebd1289d8822b99c54f4e1fe03924a6e723b2ed93dd7f"}, ] [package.dependencies] @@ -2465,13 +2487,13 @@ testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs [[package]] name = "importlib-resources" -version = "6.0.0" +version = "6.0.1" description = "Read 
resources from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_resources-6.0.0-py3-none-any.whl", hash = "sha256:d952faee11004c045f785bb5636e8f885bed30dc3c940d5d42798a2a4541c185"}, - {file = "importlib_resources-6.0.0.tar.gz", hash = "sha256:4cf94875a8368bd89531a756df9a9ebe1f150e0f885030b461237bc7f2d905f2"}, + {file = "importlib_resources-6.0.1-py3-none-any.whl", hash = "sha256:134832a506243891221b88b4ae1213327eea96ceb4e407a00d790bb0626f45cf"}, + {file = "importlib_resources-6.0.1.tar.gz", hash = "sha256:4359457e42708462b9626a04657c6208ad799ceb41e5c58c57ffa0e6a098a5d4"}, ] [package.dependencies] @@ -2505,13 +2527,13 @@ files = [ [[package]] name = "ipykernel" -version = "6.25.0" +version = "6.25.1" description = "IPython Kernel for Jupyter" optional = false python-versions = ">=3.8" files = [ - {file = "ipykernel-6.25.0-py3-none-any.whl", hash = "sha256:f0042e867ac3f6bca1679e6a88cbd6a58ed93a44f9d0866aecde6efe8de76659"}, - {file = "ipykernel-6.25.0.tar.gz", hash = "sha256:e342ce84712861be4b248c4a73472be4702c1b0dd77448bfd6bcfb3af9d5ddf9"}, + {file = "ipykernel-6.25.1-py3-none-any.whl", hash = "sha256:c8a2430b357073b37c76c21c52184db42f6b4b0e438e1eb7df3c4440d120497c"}, + {file = "ipykernel-6.25.1.tar.gz", hash = "sha256:050391364c0977e768e354bdb60cbbfbee7cbb943b1af1618382021136ffd42f"}, ] [package.dependencies] @@ -2595,12 +2617,12 @@ testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", [[package]] name = "jcloud" -version = "0.2.14" +version = "0.2.16" description = "Simplify deploying and managing Jina projects on Jina Cloud" optional = false python-versions = "*" files = [ - {file = "jcloud-0.2.14.tar.gz", hash = "sha256:60798a41be370cacf380bb024f04086df8b0e65f20e51b4fbe1dd11e04d95be7"}, + {file = "jcloud-0.2.16.tar.gz", hash = "sha256:abf39a70fc5852574a05e03dac5e5cc364df87ae1f8476cbd441178f14adb578"}, ] [package.dependencies] @@ -2887,13 +2909,13 @@ testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", [[package]] name = "langchain" -version = "0.0.250" +version = "0.0.256" description = "Building applications with LLMs through composability" optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "langchain-0.0.250-py3-none-any.whl", hash = "sha256:65b3520f507e848edd88a35a70700971bbbf822fda65f621ccf44a3bb36ad03a"}, - {file = "langchain-0.0.250.tar.gz", hash = "sha256:1b5775d6a472f633bb06e794f58cb6ff5d1eeb2da603b64a6a15013f8f61ee3f"}, + {file = "langchain-0.0.256-py3-none-any.whl", hash = "sha256:3389fcb85d8d4fb16bae5ca9995d3ce634a3330f8ac1f458afc6171e4ca52de5"}, + {file = "langchain-0.0.256.tar.gz", hash = "sha256:b80115e19f86199c49bca8ef18c09d2d87548332a0144a1c5ce6a2f82e4f5f9c"}, ] [package.dependencies] @@ -2905,7 +2927,7 @@ numexpr = ">=2.8.4,<3.0.0" numpy = ">=1,<2" openapi-schema-pydantic = ">=1.2,<2.0" pydantic = ">=1,<2" -PyYAML = ">=5.4.1" +PyYAML = ">=5.3" requests = ">=2,<3" SQLAlchemy = ">=1.4,<3" tenacity = ">=8.1.0,<9.0.0" @@ -2917,26 +2939,40 @@ clarifai = ["clarifai (>=9.1.0)"] cohere = ["cohere (>=4,<5)"] docarray = ["docarray[hnswlib] (>=0.32.0,<0.33.0)"] embeddings = ["sentence-transformers (>=2,<3)"] -extended-testing = ["atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.0.7,<0.0.8)", "chardet (>=5.1.0,<6.0.0)", "esprima (>=4.0.1,<5.0.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "gql (>=3.4.1,<4.0.0)", "html2text (>=2020.1.16,<2021.0.0)", "jinja2 (>=3,<4)", "jq 
(>=1.4.1,<2.0.0)", "lxml (>=4.9.2,<5.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "openai (>=0,<1)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "tqdm (>=4.48.0)", "xinference (>=0.0.6,<0.0.7)", "zep-python (>=0.32)"] +extended-testing = ["amazon-textract-caller (<2)", "atlassian-python-api (>=3.36.0,<4.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.0.7,<0.0.8)", "chardet (>=5.1.0,<6.0.0)", "esprima (>=4.0.1,<5.0.0)", "feedparser (>=6.0.10,<7.0.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "gql (>=3.4.1,<4.0.0)", "html2text (>=2020.1.16,<2021.0.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "lxml (>=4.9.2,<5.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "openai (>=0,<1)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "tqdm (>=4.48.0)", "xata (>=1.0.0a7,<2.0.0)", "xinference (>=0.0.6,<0.0.7)", "zep-python (>=0.32)"] javascript = ["esprima (>=4.0.1,<5.0.0)"] llms = ["anthropic (>=0.3,<0.4)", "clarifai (>=9.1.0)", "cohere (>=4,<5)", "huggingface_hub (>=0,<1)", "manifest-ml (>=0.0.1,<0.0.2)", "nlpcloud (>=1,<2)", "openai (>=0,<1)", "openllm (>=0.1.19)", "openlm (>=0.0.5,<0.0.6)", "torch (>=1,<3)", "transformers (>=4,<5)", "xinference (>=0.0.6,<0.0.7)"] openai = ["openai (>=0,<1)", "tiktoken (>=0.3.2,<0.4.0)"] qdrant = ["qdrant-client (>=1.3.1,<2.0.0)"] text-helpers = ["chardet (>=5.1.0,<6.0.0)"] +[[package]] +name = "langchain-experimental" +version = "0.0.8" +description = "Building applications with LLMs through composability" +optional = false +python-versions = ">=3.8.1,<4.0" +files = [ + {file = "langchain_experimental-0.0.8-py3-none-any.whl", hash = "sha256:34cf202ba29fdef178c5d68772cf2fa08dd2a0fad588ca3ef324ae71e596cc21"}, + {file = "langchain_experimental-0.0.8.tar.gz", hash = "sha256:35d198f8e70a053ccd84273198ff08b4b700ac03ec5f43ba9b7ef797dd42ad14"}, +] + +[package.dependencies] +langchain = ">=0.0.239" + [[package]] name = "langchain-serve" -version = "0.0.58" +version = "0.0.59" description = "Langchain Serve - serve your langchain apps on Jina AI Cloud." 
optional = true python-versions = "*" files = [ - {file = "langchain-serve-0.0.58.tar.gz", hash = "sha256:9ec65c8eeda9f777fb8b79117d92839873b10030fd832184ba891d4d3a065fff"}, + {file = "langchain-serve-0.0.59.tar.gz", hash = "sha256:28ff4ba1b640223158413296ac5b9198eac7cbd206d3386855ad00066a9f91d6"}, ] [package.dependencies] click = "*" -jcloud = ">=0.2.13" +jcloud = ">=0.2.16" jina = "3.15.2" jina-hubble-sdk = "*" langchain = "*" @@ -2951,13 +2987,13 @@ test = ["psutil", "pytest", "pytest-asyncio"] [[package]] name = "langsmith" -version = "0.0.16" +version = "0.0.19" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "langsmith-0.0.16-py3-none-any.whl", hash = "sha256:a9a5b30dc1c0ee0c9f8bbd2076856f1db3856a9521464f130be1db519c7077e5"}, - {file = "langsmith-0.0.16.tar.gz", hash = "sha256:0729df30dd5b1c9d0c83614b5d348c7358ac182d7e3003dafbfcbedb88cbc8b8"}, + {file = "langsmith-0.0.19-py3-none-any.whl", hash = "sha256:ae240030fd0b98e9467fbf19ac6d58a0a4ffcc1db8462625141dae6178e62c68"}, + {file = "langsmith-0.0.19.tar.gz", hash = "sha256:e91a2cd101456e2f8d6015c9ea371d6556eb6072a1b20d4793479855163ae28f"}, ] [package.dependencies] @@ -3165,6 +3201,25 @@ docs = ["sphinx (>=1.6.0)", "sphinx-bootstrap-theme"] flake8 = ["flake8"] tests = ["psutil", "pytest (!=3.3.0)", "pytest-cov"] +[[package]] +name = "mako" +version = "1.2.4" +description = "A super-fast templating language that borrows the best ideas from the existing templating languages." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Mako-1.2.4-py3-none-any.whl", hash = "sha256:c97c79c018b9165ac9922ae4f32da095ffd3c4e6872b45eded42926deea46818"}, + {file = "Mako-1.2.4.tar.gz", hash = "sha256:d60a3903dc3bb01a18ad6a89cdbe2e4eadc69c0bc8ef1e3773ba53d44c3f7a34"}, +] + +[package.dependencies] +MarkupSafe = ">=0.9.2" + +[package.extras] +babel = ["Babel"] +lingua = ["lingua"] +testing = ["pytest"] + [[package]] name = "markdown" version = "3.4.4" @@ -3213,7 +3268,7 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] name = "markupsafe" version = "2.1.3" description = "Safely add untrusted strings to HTML/XML markup." 
-optional = true +optional = false python-versions = ">=3.7" files = [ {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, @@ -3345,13 +3400,13 @@ files = [ [[package]] name = "more-itertools" -version = "10.0.0" +version = "10.1.0" description = "More routines for operating on iterables, beyond itertools" optional = false python-versions = ">=3.8" files = [ - {file = "more-itertools-10.0.0.tar.gz", hash = "sha256:cd65437d7c4b615ab81c0640c0480bc29a550ea032891977681efd28344d51e1"}, - {file = "more_itertools-10.0.0-py3-none-any.whl", hash = "sha256:928d514ffd22b5b0a8fce326d57f423a55d2ff783b093bab217eda71e732330f"}, + {file = "more-itertools-10.1.0.tar.gz", hash = "sha256:626c369fa0eb37bac0291bce8259b332fd59ac792fa5497b59837309cd5b114a"}, + {file = "more_itertools-10.1.0-py3-none-any.whl", hash = "sha256:64e0735fcfdc6f3464ea133afe8ea4483b1c5fe3a3d69852e6503b43a0b222e6"}, ] [[package]] @@ -3612,41 +3667,41 @@ twitter = ["twython"] [[package]] name = "numexpr" -version = "2.8.4" +version = "2.8.5" description = "Fast numerical expression evaluator for NumPy" optional = false python-versions = ">=3.7" files = [ - {file = "numexpr-2.8.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a75967d46b6bd56455dd32da6285e5ffabe155d0ee61eef685bbfb8dafb2e484"}, - {file = "numexpr-2.8.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db93cf1842f068247de631bfc8af20118bf1f9447cd929b531595a5e0efc9346"}, - {file = "numexpr-2.8.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bca95f4473b444428061d4cda8e59ac564dc7dc6a1dea3015af9805c6bc2946"}, - {file = "numexpr-2.8.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e34931089a6bafc77aaae21f37ad6594b98aa1085bb8b45d5b3cd038c3c17d9"}, - {file = "numexpr-2.8.4-cp310-cp310-win32.whl", hash = "sha256:f3a920bfac2645017110b87ddbe364c9c7a742870a4d2f6120b8786c25dc6db3"}, - {file = "numexpr-2.8.4-cp310-cp310-win_amd64.whl", hash = "sha256:6931b1e9d4f629f43c14b21d44f3f77997298bea43790cfcdb4dd98804f90783"}, - {file = "numexpr-2.8.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9400781553541f414f82eac056f2b4c965373650df9694286b9bd7e8d413f8d8"}, - {file = "numexpr-2.8.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6ee9db7598dd4001138b482342b96d78110dd77cefc051ec75af3295604dde6a"}, - {file = "numexpr-2.8.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff5835e8af9a212e8480003d731aad1727aaea909926fd009e8ae6a1cba7f141"}, - {file = "numexpr-2.8.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:655d84eb09adfee3c09ecf4a89a512225da153fdb7de13c447404b7d0523a9a7"}, - {file = "numexpr-2.8.4-cp311-cp311-win32.whl", hash = "sha256:5538b30199bfc68886d2be18fcef3abd11d9271767a7a69ff3688defe782800a"}, - {file = "numexpr-2.8.4-cp311-cp311-win_amd64.whl", hash = "sha256:3f039321d1c17962c33079987b675fb251b273dbec0f51aac0934e932446ccc3"}, - {file = "numexpr-2.8.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c867cc36cf815a3ec9122029874e00d8fbcef65035c4a5901e9b120dd5d626a2"}, - {file = "numexpr-2.8.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:059546e8f6283ccdb47c683101a890844f667fa6d56258d48ae2ecf1b3875957"}, - {file = "numexpr-2.8.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:845a6aa0ed3e2a53239b89c1ebfa8cf052d3cc6e053c72805e8153300078c0b1"}, - {file = "numexpr-2.8.4-cp37-cp37m-win32.whl", hash = 
"sha256:a38664e699526cb1687aefd9069e2b5b9387da7feac4545de446141f1ef86f46"}, - {file = "numexpr-2.8.4-cp37-cp37m-win_amd64.whl", hash = "sha256:eaec59e9bf70ff05615c34a8b8d6c7bd042bd9f55465d7b495ea5436f45319d0"}, - {file = "numexpr-2.8.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b318541bf3d8326682ebada087ba0050549a16d8b3fa260dd2585d73a83d20a7"}, - {file = "numexpr-2.8.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b076db98ca65eeaf9bd224576e3ac84c05e451c0bd85b13664b7e5f7b62e2c70"}, - {file = "numexpr-2.8.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:90f12cc851240f7911a47c91aaf223dba753e98e46dff3017282e633602e76a7"}, - {file = "numexpr-2.8.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c368aa35ae9b18840e78b05f929d3a7b3abccdba9630a878c7db74ca2368339"}, - {file = "numexpr-2.8.4-cp38-cp38-win32.whl", hash = "sha256:b96334fc1748e9ec4f93d5fadb1044089d73fb08208fdb8382ed77c893f0be01"}, - {file = "numexpr-2.8.4-cp38-cp38-win_amd64.whl", hash = "sha256:a6d2d7740ae83ba5f3531e83afc4b626daa71df1ef903970947903345c37bd03"}, - {file = "numexpr-2.8.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:77898fdf3da6bb96aa8a4759a8231d763a75d848b2f2e5c5279dad0b243c8dfe"}, - {file = "numexpr-2.8.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:df35324666b693f13a016bc7957de7cc4d8801b746b81060b671bf78a52b9037"}, - {file = "numexpr-2.8.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17ac9cfe6d0078c5fc06ba1c1bbd20b8783f28c6f475bbabd3cad53683075cab"}, - {file = "numexpr-2.8.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df3a1f6b24214a1ab826e9c1c99edf1686c8e307547a9aef33910d586f626d01"}, - {file = "numexpr-2.8.4-cp39-cp39-win32.whl", hash = "sha256:7d71add384adc9119568d7e9ffa8a35b195decae81e0abf54a2b7779852f0637"}, - {file = "numexpr-2.8.4-cp39-cp39-win_amd64.whl", hash = "sha256:9f096d707290a6a00b6ffdaf581ee37331109fb7b6c8744e9ded7c779a48e517"}, - {file = "numexpr-2.8.4.tar.gz", hash = "sha256:d5432537418d18691b9115d615d6daa17ee8275baef3edf1afbbf8bc69806147"}, + {file = "numexpr-2.8.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:51f3ab160c3847ebcca93cd88f935a7802b54a01ab63fe93152994a64d7a6cf2"}, + {file = "numexpr-2.8.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:de29c77f674e4eb8f0846525a475cab64008c227c8bc4ba5153ab3f72441cc63"}, + {file = "numexpr-2.8.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf85ba1327eb87ec82ae7936f13c8850fb969a0ca34f3ba9fa3897c09d5c80d7"}, + {file = "numexpr-2.8.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c00be69f747f44a631830215cab482f0f77f75af2925695adff57c1cc0f9a68"}, + {file = "numexpr-2.8.5-cp310-cp310-win32.whl", hash = "sha256:c46350dcdb93e32f033eea5a21269514ffcaf501d9abd6036992d37e48a308b0"}, + {file = "numexpr-2.8.5-cp310-cp310-win_amd64.whl", hash = "sha256:894b027438b8ec88dea32a19193716c79f4ff8ddb92302dcc9731b51ba3565a8"}, + {file = "numexpr-2.8.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6df184d40d4cf9f21c71f429962f39332f7398147762588c9f3a5c77065d0c06"}, + {file = "numexpr-2.8.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:178b85ad373c6903e55d75787d61b92380439b70d94b001cb055a501b0821335"}, + {file = "numexpr-2.8.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:578fe4008e4d5d6ff01bbeb2d7b7ba1ec658a5cda9c720cd26a9a8325f8ef438"}, + {file = "numexpr-2.8.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:ef621b4ee366a5c6a484f6678c9259f5b826569f8bfa0b89ba2306d5055468bb"}, + {file = "numexpr-2.8.5-cp311-cp311-win32.whl", hash = "sha256:dd57ab1a3d3aaa9274aff1cefbf93b8ddacc7973afef5b125905f6bf18fabab0"}, + {file = "numexpr-2.8.5-cp311-cp311-win_amd64.whl", hash = "sha256:783324ba40eb804ecfc9ebae86120a1e339ab112d0ab8a1f0d48a26354d5bf9b"}, + {file = "numexpr-2.8.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:183d5430db76826e54465c69db93a3c6ecbf03cda5aa1bb96eaad0147e9b68dc"}, + {file = "numexpr-2.8.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39ce106f92ccea5b07b1d6f2f3c4370f05edf27691dc720a63903484a2137e48"}, + {file = "numexpr-2.8.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b594dc9e2d6291a0bc5c065e6d9caf3eee743b5663897832e9b17753c002947a"}, + {file = "numexpr-2.8.5-cp37-cp37m-win32.whl", hash = "sha256:62b4faf8e0627673b0210a837792bddd23050ecebc98069ab23eb0633ff1ef5f"}, + {file = "numexpr-2.8.5-cp37-cp37m-win_amd64.whl", hash = "sha256:db5c65417d69414f1ab31302ea01d3548303ef31209c38b4849d145be4e1d1ba"}, + {file = "numexpr-2.8.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eb36ffcfa1606e41aa08d559b4277bcad0e16b83941d1a4fee8d2bd5a34f8e0e"}, + {file = "numexpr-2.8.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:34af2a0e857d02a4bc5758bc037a777d50dacb13bcd57c7905268a3e44994ed6"}, + {file = "numexpr-2.8.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a8dad2bfaad5a5c34a2e8bbf62b9df1dfab266d345fda1feb20ff4e264b347a"}, + {file = "numexpr-2.8.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b93f5a866cd13a808bc3d3a9c487d94cd02eec408b275ff0aa150f2e8e5191f8"}, + {file = "numexpr-2.8.5-cp38-cp38-win32.whl", hash = "sha256:558390fea6370003ac749ed9d0f38d708aa096f5dcb707ddb6e0ca5a0dd37da1"}, + {file = "numexpr-2.8.5-cp38-cp38-win_amd64.whl", hash = "sha256:55983806815035eb63c5039520688c49536bb7f3cc3fc1d7d64c6a00cf3f353e"}, + {file = "numexpr-2.8.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1510da20e6f5f45333610b1ded44c566e2690c6c437c84f2a212ca09627c7e01"}, + {file = "numexpr-2.8.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9e8b5bf7bcb4e8dcd66522d8fc96e1db7278f901cb4fd2e155efbe62a41dde08"}, + {file = "numexpr-2.8.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ed0e1c1ef5f34381448539f1fe9015906d21c9cfa2797c06194d4207dadb465"}, + {file = "numexpr-2.8.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aea6ab45c87c0a7041183c08a798f0ad4d7c5eccbce20cfe79ce6f1a45ef3702"}, + {file = "numexpr-2.8.5-cp39-cp39-win32.whl", hash = "sha256:cbfd833ee5fdb0efb862e152aee7e6ccea9c596d5c11d22604c2e6307bff7cad"}, + {file = "numexpr-2.8.5-cp39-cp39-win_amd64.whl", hash = "sha256:283ce8609a7ccbadf91a68f3484558b3e36d27c93c98a41ec205efb0ab43c872"}, + {file = "numexpr-2.8.5.tar.gz", hash = "sha256:45ed41e55a0abcecf3d711481e12a5fb7a904fe99d42bc282a17cc5f8ea510be"}, ] [package.dependencies] @@ -4053,68 +4108,78 @@ files = [ [[package]] name = "orjson" -version = "3.9.2" +version = "3.9.3" description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" optional = false python-versions = ">=3.7" files = [ - {file = "orjson-3.9.2-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:7323e4ca8322b1ecb87562f1ec2491831c086d9faa9a6c6503f489dadbed37d7"}, - {file = "orjson-3.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:1272688ea1865f711b01ba479dea2d53e037ea00892fd04196b5875f7021d9d3"}, - {file = "orjson-3.9.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0b9a26f1d1427a9101a1e8910f2e2df1f44d3d18ad5480ba031b15d5c1cb282e"}, - {file = "orjson-3.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6a5ca55b0d8f25f18b471e34abaee4b175924b6cd62f59992945b25963443141"}, - {file = "orjson-3.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:877872db2c0f41fbe21f852ff642ca842a43bc34895b70f71c9d575df31fffb4"}, - {file = "orjson-3.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a39c2529d75373b7167bf84c814ef9b8f3737a339c225ed6c0df40736df8748"}, - {file = "orjson-3.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:84ebd6fdf138eb0eb4280045442331ee71c0aab5e16397ba6645f32f911bfb37"}, - {file = "orjson-3.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5a60a1cfcfe310547a1946506dd4f1ed0a7d5bd5b02c8697d9d5dcd8d2e9245e"}, - {file = "orjson-3.9.2-cp310-none-win_amd64.whl", hash = "sha256:c290c4f81e8fd0c1683638802c11610b2f722b540f8e5e858b6914b495cf90c8"}, - {file = "orjson-3.9.2-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:02ef014f9a605e84b675060785e37ec9c0d2347a04f1307a9d6840ab8ecd6f55"}, - {file = "orjson-3.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:992af54265ada1c1579500d6594ed73fe333e726de70d64919cf37f93defdd06"}, - {file = "orjson-3.9.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a40958f7af7c6d992ee67b2da4098dca8b770fc3b4b3834d540477788bfa76d3"}, - {file = "orjson-3.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93864dec3e3dd058a2dbe488d11ac0345214a6a12697f53a63e34de7d28d4257"}, - {file = "orjson-3.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16fdf5a82df80c544c3c91516ab3882cd1ac4f1f84eefeafa642e05cef5f6699"}, - {file = "orjson-3.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:275b5a18fd9ed60b2720543d3ddac170051c43d680e47d04ff5203d2c6d8ebf1"}, - {file = "orjson-3.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b9aea6dcb99fcbc9f6d1dd84fca92322fda261da7fb014514bb4689c7c2097a8"}, - {file = "orjson-3.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7d74ae0e101d17c22ef67b741ba356ab896fc0fa64b301c2bf2bb0a4d874b190"}, - {file = "orjson-3.9.2-cp311-none-win_amd64.whl", hash = "sha256:6320b28e7bdb58c3a3a5efffe04b9edad3318d82409e84670a9b24e8035a249d"}, - {file = "orjson-3.9.2-cp37-cp37m-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:368e9cc91ecb7ac21f2aa475e1901204110cf3e714e98649c2502227d248f947"}, - {file = "orjson-3.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58e9e70f0dcd6a802c35887f306b555ff7a214840aad7de24901fc8bd9cf5dde"}, - {file = "orjson-3.9.2-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:00c983896c2e01c94c0ef72fd7373b2aa06d0c0eed0342c4884559f812a6835b"}, - {file = "orjson-3.9.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ee743e8890b16c87a2f89733f983370672272b61ee77429c0a5899b2c98c1a7"}, - {file = "orjson-3.9.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7b065942d362aad4818ff599d2f104c35a565c2cbcbab8c09ec49edba91da75"}, - {file = 
"orjson-3.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e46e9c5b404bb9e41d5555762fd410d5466b7eb1ec170ad1b1609cbebe71df21"}, - {file = "orjson-3.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8170157288714678ffd64f5de33039e1164a73fd8b6be40a8a273f80093f5c4f"}, - {file = "orjson-3.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e3e2f087161947dafe8319ea2cfcb9cea4bb9d2172ecc60ac3c9738f72ef2909"}, - {file = "orjson-3.9.2-cp37-none-win_amd64.whl", hash = "sha256:d7de3dbbe74109ae598692113cec327fd30c5a30ebca819b21dfa4052f7b08ef"}, - {file = "orjson-3.9.2-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:8cd4385c59bbc1433cad4a80aca65d2d9039646a9c57f8084897549b55913b17"}, - {file = "orjson-3.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a74036aab1a80c361039290cdbc51aa7adc7ea13f56e5ef94e9be536abd227bd"}, - {file = "orjson-3.9.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1aaa46d7d4ae55335f635eadc9be0bd9bcf742e6757209fc6dc697e390010adc"}, - {file = "orjson-3.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e52c67ed6bb368083aa2078ea3ccbd9721920b93d4b06c43eb4e20c4c860046"}, - {file = "orjson-3.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a6cdfcf9c7dd4026b2b01fdff56986251dc0cc1e980c690c79eec3ae07b36e7"}, - {file = "orjson-3.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1882a70bb69595b9ec5aac0040a819e94d2833fe54901e2b32f5e734bc259a8b"}, - {file = "orjson-3.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:fc05e060d452145ab3c0b5420769e7356050ea311fc03cb9d79c481982917cca"}, - {file = "orjson-3.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f8bc2c40d9bb26efefb10949d261a47ca196772c308babc538dd9f4b73e8d386"}, - {file = "orjson-3.9.2-cp38-none-win_amd64.whl", hash = "sha256:3164fc20a585ec30a9aff33ad5de3b20ce85702b2b2a456852c413e3f0d7ab09"}, - {file = "orjson-3.9.2-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:7a6ccadf788531595ed4728aa746bc271955448d2460ff0ef8e21eb3f2a281ba"}, - {file = "orjson-3.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3245d230370f571c945f69aab823c279a868dc877352817e22e551de155cb06c"}, - {file = "orjson-3.9.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:205925b179550a4ee39b8418dd4c94ad6b777d165d7d22614771c771d44f57bd"}, - {file = "orjson-3.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0325fe2d69512187761f7368c8cda1959bcb75fc56b8e7a884e9569112320e57"}, - {file = "orjson-3.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:806704cd58708acc66a064a9a58e3be25cf1c3f9f159e8757bd3f515bfabdfa1"}, - {file = "orjson-3.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03fb36f187a0c19ff38f6289418863df8b9b7880cdbe279e920bef3a09d8dab1"}, - {file = "orjson-3.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:20925d07a97c49c6305bff1635318d9fc1804aa4ccacb5fb0deb8a910e57d97a"}, - {file = "orjson-3.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:eebfed53bec5674e981ebe8ed2cf00b3f7bcda62d634733ff779c264307ea505"}, - {file = "orjson-3.9.2-cp39-none-win_amd64.whl", hash = "sha256:869b961df5fcedf6c79f4096119b35679b63272362e9b745e668f0391a892d39"}, - {file = "orjson-3.9.2.tar.gz", hash = 
"sha256:24257c8f641979bf25ecd3e27251b5cc194cdd3a6e96004aac8446f5e63d9664"}, + {file = "orjson-3.9.3-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:082714b5554fcced092c45272f22a93400389733083c43f5043c4316e86f57a2"}, + {file = "orjson-3.9.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97ddec69ca4fa1b66d512cf4f4a3fe6a57c4bf21209295ab2f4ada415996e08a"}, + {file = "orjson-3.9.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ab7501722ec2172b1c6ea333bc47bba3bbb9b5fc0e3e891191e8447f43d3187d"}, + {file = "orjson-3.9.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ae680163ab09f04683d35fbd63eee858019f0066640f7cbad4dba3e7422a4bc"}, + {file = "orjson-3.9.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e5abca1e0a9d110bab7346fab0acd3b7848d2ee13318bc24a31bbfbdad974b8"}, + {file = "orjson-3.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c55f42a8b07cdb7d514cfaeb56f6e9029eef1cbc8e670ac31fc377c46b993cd1"}, + {file = "orjson-3.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:303f1324f5ea516f8e874ea0f8d15c581caabdca59fc990705fc76f3bd9f3bdf"}, + {file = "orjson-3.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c444e3931ea4fe7dec26d195486a681fedc0233230c9b84848f8e60affd4a4"}, + {file = "orjson-3.9.3-cp310-none-win32.whl", hash = "sha256:63333de96d83091023c9c99cc579973a2977b15feb5cdc8d9660104c886e9ab8"}, + {file = "orjson-3.9.3-cp310-none-win_amd64.whl", hash = "sha256:7bce6ff507a83c6a4b6b00726f3a7d7aed0b1f0884aac0440e95b55cac0b113e"}, + {file = "orjson-3.9.3-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:ec4421f377cce51decd6ea3869a8b41e9f05c50bf6acef8284f8906e642992c4"}, + {file = "orjson-3.9.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b3177bd67756e53bdbd72c79fae3507796a67b67c32a16f4b55cad48ef25c13"}, + {file = "orjson-3.9.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b21908252c8a13b8f48d4cccdb7fabb592824cf39c9fa4e9076015dd65eabeba"}, + {file = "orjson-3.9.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7b795c6ac344b0c49776b7e135a9bed0cd15b1ade2a4c7b3a19e3913247702e"}, + {file = "orjson-3.9.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ac43842f5ba26e6f21b4e63312bd1137111a9b9821d7f7dfe189a4015c6c6bc"}, + {file = "orjson-3.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8def4f6560c7b6dbc4b356dfd8e6624a018d920ce5a2864291a2bf1052cd6b68"}, + {file = "orjson-3.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bbc0dafd1de42c8dbfd6e5d1fe4deab15d2de474e11475921286bebefd109ec8"}, + {file = "orjson-3.9.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:85b1870d5420292419b34002659082d77f31b13d4d8cbd67bed9d717c775a0fb"}, + {file = "orjson-3.9.3-cp311-none-win32.whl", hash = "sha256:d6ece3f48f14a06c325181f2b9bd9a9827aac2ecdcad11eb12f561fb697eaaaa"}, + {file = "orjson-3.9.3-cp311-none-win_amd64.whl", hash = "sha256:448feda092c681c0a5b8eec62dd4f625ad5d316dafd56c81fb3f05b5221827ff"}, + {file = "orjson-3.9.3-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:413d7cf731f1222373360128a3d5232d52630a7355f446bf2659fc3445ec0b76"}, + {file = "orjson-3.9.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:009a0f79804c604998b068f5f942e40546913ed45ee2f0a3d0e75695bf7543fa"}, + {file = "orjson-3.9.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ce062844255cce4d6a8a150e8e78b9fcd6c5a3f1ff3f8792922de25827c25b9c"}, + {file = "orjson-3.9.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:776659e18debe5de73c30b0957cd6454fcc61d87377fcb276441fca1b9f1305d"}, + {file = "orjson-3.9.3-cp312-none-win_amd64.whl", hash = "sha256:47b237da3818c8e546df4d2162f0a5cfd50b7b58528907919a27244141e0e48e"}, + {file = "orjson-3.9.3-cp37-cp37m-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:f954115d8496d4ab5975438e3ce07780c1644ea0a66c78a943ef79f33769b61a"}, + {file = "orjson-3.9.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05c57100517b6dbfe34181ed2248bebfab03bd2a7aafb6fbf849c6fd3bb2fbda"}, + {file = "orjson-3.9.3-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:aa6017140fe487ab8fae605a2890c94c6fbe7a8e763ff33bbdb00e27ce078cfd"}, + {file = "orjson-3.9.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6fe77af2ff33c370fb06c9fdf004a66d85ea19c77f0273bbf70c70f98f832725"}, + {file = "orjson-3.9.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e2fa8c385b27bab886caa098fa3ae114d56571ae6e7a5610cb624d7b0a66faed"}, + {file = "orjson-3.9.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8323739e7905ae4ec4dbdebb31067d28be981f30c11b6ae88ddec2671c0b3194"}, + {file = "orjson-3.9.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ad43fd5b1ededb54fe01e67468710fcfec8a5830e4ce131f85e741ea151a18e9"}, + {file = "orjson-3.9.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:42cb645780f732c829bc351346a54157d57f2bc409e671ee36b9fc1037bb77fe"}, + {file = "orjson-3.9.3-cp37-none-win32.whl", hash = "sha256:b84542669d1b0175dc2870025b73cbd4f4a3beb17796de6ec82683663e0400f3"}, + {file = "orjson-3.9.3-cp37-none-win_amd64.whl", hash = "sha256:1440a404ce84f43e2f8e97d8b5fe6f271458e0ffd37290dc3a9f6aa067c69930"}, + {file = "orjson-3.9.3-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:1da8edaefb75f25b449ed4e22d00b9b49211b97dcefd44b742bdd8721d572788"}, + {file = "orjson-3.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47210746acda49febe3bb07253eb5d63d7c7511beec5fa702aad3ce64e15664f"}, + {file = "orjson-3.9.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:893c62afd5b26f04e2814dffa4d9d4060583ac43dc3e79ed3eadf62a5ac37b2c"}, + {file = "orjson-3.9.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:32aef33ae33901c327fd5679f91fa37199834d122dffd234416a6fe4193d1982"}, + {file = "orjson-3.9.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd2761384ddb9de63b20795845d5cedadf052255a34c3ff1750cfc77b29d9926"}, + {file = "orjson-3.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19e2502b4af2055050dcc74718f2647b65102087c6f5b3f939e2e1a3e3099602"}, + {file = "orjson-3.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:fa7c7a39eeb8dd171f59d96fd4610f908ac14b2f2eb268f4498e5f310bda8da7"}, + {file = "orjson-3.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cc3fe0c0ae7acf00d827efe2506131f1b19af3c87e3d76b0e081748984e51c26"}, + {file = "orjson-3.9.3-cp38-none-win32.whl", hash = "sha256:5b1ff8e920518753b310034e5796f0116f7732b0b27531012d46f0b54f3c8c85"}, + {file = 
"orjson-3.9.3-cp38-none-win_amd64.whl", hash = "sha256:9f2b1007174c93dd838f52e623c972df33057e3cb7ad9341b7d9bbd66b8d8fb4"}, + {file = "orjson-3.9.3-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:cddc5b8bd7b0d1dfd36637eedbd83726b8b8a5969d3ecee70a9b54a94b8a0258"}, + {file = "orjson-3.9.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:43c3bbf4b6f94fad2fd73c81293da8b343fbd07ce48d7836c07d0d54b58c8e93"}, + {file = "orjson-3.9.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a5cc22ef6973992db18952f8b978781e19a0c62c098f475db936284df9311df7"}, + {file = "orjson-3.9.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9dcea93630986209c690f27f32398956b04ccbba8f1fa7c3d1bb88a01d9ab87a"}, + {file = "orjson-3.9.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:526cb34e63faaad908c34597294507b7a4b999a436b4f206bc4e60ff4e911c20"}, + {file = "orjson-3.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f5ac6e30ee10af57f52e72f9c8b9bc4846a9343449d10ca2ae9760615da3042"}, + {file = "orjson-3.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b6c37ab097c062bdf535105c7156839c4e370065c476bb2393149ad31a2cdf6e"}, + {file = "orjson-3.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:27d69628f449c52a7a34836b15ec948804254f7954457f88de53f2f4de99512f"}, + {file = "orjson-3.9.3-cp39-none-win32.whl", hash = "sha256:5297463d8831c2327ed22bf92eb6d50347071ff1c73fb4702d50b8bc514aeac9"}, + {file = "orjson-3.9.3-cp39-none-win_amd64.whl", hash = "sha256:69a33486b5b6e5a99939fdb13c1c0d8bcc7c89fe6083e7b9ce3c70931ca9fb71"}, + {file = "orjson-3.9.3.tar.gz", hash = "sha256:d3da4faf6398154c1e75d32778035fa7dc284814809f76e8f8d50c4f54859399"}, ] [[package]] name = "overrides" -version = "7.3.1" +version = "7.4.0" description = "A decorator to automatically detect mismatch when overriding a method." 
optional = false python-versions = ">=3.6" files = [ - {file = "overrides-7.3.1-py3-none-any.whl", hash = "sha256:6187d8710a935d09b0bcef8238301d6ee2569d2ac1ae0ec39a8c7924e27f58ca"}, - {file = "overrides-7.3.1.tar.gz", hash = "sha256:8b97c6c1e1681b78cbc9424b138d880f0803c2254c5ebaabdde57bb6c62093f2"}, + {file = "overrides-7.4.0-py3-none-any.whl", hash = "sha256:3ad24583f86d6d7a49049695efe9933e67ba62f0c7625d53c59fa832ce4b8b7d"}, + {file = "overrides-7.4.0.tar.gz", hash = "sha256:9502a3cca51f4fac40b5feca985b6703a5c1f6ad815588a7ca9e285b9dca6757"}, ] [[package]] @@ -4610,13 +4675,13 @@ test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] [[package]] name = "psycopg" -version = "3.1.9" +version = "3.1.10" description = "PostgreSQL database adapter for Python" optional = false python-versions = ">=3.7" files = [ - {file = "psycopg-3.1.9-py3-none-any.whl", hash = "sha256:fbbac339274d8733ee70ba9822297af3e8871790a26e967b5ea53e30a4b74dcc"}, - {file = "psycopg-3.1.9.tar.gz", hash = "sha256:ab400f207a8c120bafdd8077916d8f6c0106e809401378708485b016508c30c9"}, + {file = "psycopg-3.1.10-py3-none-any.whl", hash = "sha256:8bbeddae5075c7890b2fa3e3553440376d3c5e28418335dee3c3656b06fa2b52"}, + {file = "psycopg-3.1.10.tar.gz", hash = "sha256:15b25741494344c24066dc2479b0f383dd1b82fa5e75612fa4fa5bb30726e9b6"}, ] [package.dependencies] @@ -4624,145 +4689,143 @@ typing-extensions = ">=4.1" tzdata = {version = "*", markers = "sys_platform == \"win32\""} [package.extras] -binary = ["psycopg-binary (==3.1.9)"] -c = ["psycopg-c (==3.1.9)"] -dev = ["black (>=23.1.0)", "dnspython (>=2.1)", "flake8 (>=4.0)", "mypy (>=1.2)", "types-setuptools (>=57.4)", "wheel (>=0.37)"] +binary = ["psycopg-binary (==3.1.10)"] +c = ["psycopg-c (==3.1.10)"] +dev = ["black (>=23.1.0)", "dnspython (>=2.1)", "flake8 (>=4.0)", "mypy (>=1.4.1)", "types-setuptools (>=57.4)", "wheel (>=0.37)"] docs = ["Sphinx (>=5.0)", "furo (==2022.6.21)", "sphinx-autobuild (>=2021.3.14)", "sphinx-autodoc-typehints (>=1.12)"] pool = ["psycopg-pool"] -test = ["anyio (>=3.6.2)", "mypy (>=1.2)", "pproxy (>=2.7)", "pytest (>=6.2.5)", "pytest-cov (>=3.0)", "pytest-randomly (>=3.5)"] +test = ["anyio (>=3.6.2)", "mypy (>=1.4.1)", "pproxy (>=2.7)", "pytest (>=6.2.5)", "pytest-cov (>=3.0)", "pytest-randomly (>=3.5)"] [[package]] name = "psycopg-binary" -version = "3.1.9" +version = "3.1.10" description = "PostgreSQL database adapter for Python -- C optimisation distribution" optional = false python-versions = ">=3.7" files = [ - {file = "psycopg_binary-3.1.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:284038cbe3f5a0f3de417af9b5eaa2a9524a3a06211523cf245111c71b566506"}, - {file = "psycopg_binary-3.1.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d2cea4bb0b19245c83486868d7c66f73238c4caa266b5b3c3d664d10dab2ab56"}, - {file = "psycopg_binary-3.1.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfe5c5c31f59ccb1d1f473466baa93d800138186286e80e251f930e49c80d208"}, - {file = "psycopg_binary-3.1.9-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82704a899d57c29beba5399d41eab5ef5c238b810d7e25e2d1916d2b34c4b1a3"}, - {file = "psycopg_binary-3.1.9-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eab449e39db1c429cac79b7aa27e6827aad4995f32137e922db7254f43fed7b5"}, - {file = "psycopg_binary-3.1.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87e0c97733b11eeca3d24e56df70f3f9d792b2abd46f48be2fb2348ffc3e7e39"}, - {file = 
"psycopg_binary-3.1.9-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:81e34d6df54329424944d5ca91b1cc77df6b8a9130cb5480680d56f53d4e485c"}, - {file = "psycopg_binary-3.1.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e2f463079d99568a343ed0b766150b30627e9ed41de99fd82e945e7e2bec764a"}, - {file = "psycopg_binary-3.1.9-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f2cbdef6568da21c39dfd45c2074e85eabbd00e1b721832ba94980f01f582dd4"}, - {file = "psycopg_binary-3.1.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:53afb0cc2ebe74651f339e22d05ec082a0f44939715d9138d357852f074fcf55"}, - {file = "psycopg_binary-3.1.9-cp310-cp310-win_amd64.whl", hash = "sha256:09167f106e7685591b4cdf58eff0191fb7435d586f384133a0dd30df646cf409"}, - {file = "psycopg_binary-3.1.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a8aaa47c1791fc05c0229ec1003dd49e13238fba9434e1fc3b879632f749c3c4"}, - {file = "psycopg_binary-3.1.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3d91ee0d33ac7b42d0488a9be2516efa2ec00901b81d69566ff34a7a94b66c0b"}, - {file = "psycopg_binary-3.1.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5e36504373e5bcdc954b1da1c6fe66379007fe1e329790e8fb72b879a01e097"}, - {file = "psycopg_binary-3.1.9-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c1def6c2d28e257325b3b208cf1966343b498282a0f4d390fda7b7e0577da64"}, - {file = "psycopg_binary-3.1.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:055537a9c20efe9bf17cb72bd879602eda71de6f737ebafa1953e017c6a37fbe"}, - {file = "psycopg_binary-3.1.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b164355d023a91b23dcc4bb3112bc7d6e9b9c938fb5abcb6e54457d2da1f317"}, - {file = "psycopg_binary-3.1.9-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03b08545ce1c627f4d5e6384eda2946660c4ba6ceb0a09ae47de07419f725669"}, - {file = "psycopg_binary-3.1.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1e31bac3d2d41e6446b20b591f638943328c958f4d1ce13d6f1c5db97c3a8dee"}, - {file = "psycopg_binary-3.1.9-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:a274c63c8fb9d419509bed2ef72befc1fd04243972e17e7f5afc5725cb13a560"}, - {file = "psycopg_binary-3.1.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:98d9d156b9ada08c271a79662fc5fcc1731b4d7c1f651ef5843d818d35f15ba0"}, - {file = "psycopg_binary-3.1.9-cp311-cp311-win_amd64.whl", hash = "sha256:c3a13aa022853891cadbc7256a9804e5989def760115c82334bddf0d19783b0b"}, - {file = "psycopg_binary-3.1.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1a321ef3579a8de0545ade6ff1edfde0c88b8847d58c5615c03751c76054796"}, - {file = "psycopg_binary-3.1.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5833bda4c14f24c6a8ac08d3c5712acaa4f35aab31f9ccd2265e9e9a7d0151c8"}, - {file = "psycopg_binary-3.1.9-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a207d5a7f4212443b7452851c9ccd88df9c6d4d58fa2cea2ead4dd9cb328e578"}, - {file = "psycopg_binary-3.1.9-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:07414daa86662f7657e9fabe49af85a32a975e92e6568337887d9c9ffedc224f"}, - {file = "psycopg_binary-3.1.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17c5d4936c746f5125c6ef9eb43655e27d4d0c9ffe34c3073878b43c3192511d"}, - {file = "psycopg_binary-3.1.9-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5cdc13c8ec1437240801e43d07e27ff6479ac9dd8583ecf647345bfd2e8390e4"}, - {file = 
"psycopg_binary-3.1.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3836bdaf030a5648bd5f5b452e4b068b265e28f9199060c5b70dbf4a218cde6e"}, - {file = "psycopg_binary-3.1.9-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:96725d9691a84a21eb3e81c884a2e043054e33e176801a57a05e9ac38d142c6e"}, - {file = "psycopg_binary-3.1.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:dade344aa90bb0b57d1cfc13304ed83ab9a36614b8ddd671381b2de72fe1483d"}, - {file = "psycopg_binary-3.1.9-cp37-cp37m-win_amd64.whl", hash = "sha256:db866cc557d9761036771d666d17fa4176c537af7e6098f42a6bf8f64217935f"}, - {file = "psycopg_binary-3.1.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3b62545cc64dd69ea0ae5ffe18d7c97e03660ab8244aa8c5172668a21c41daa0"}, - {file = "psycopg_binary-3.1.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:058ab0d79be0b229338f0e61fec6f475077518cba63c22c593645a69f01c3e23"}, - {file = "psycopg_binary-3.1.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2340ca2531f69e5ebd9d18987362ba57ed6ab6a271511d8026814a46a2a87b59"}, - {file = "psycopg_binary-3.1.9-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b816ce0e27a2a8786d34b61d3e36e01029245025879d64b88554326b794a4f0"}, - {file = "psycopg_binary-3.1.9-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7b36fe4314a784fbe45c9fd71c902b9bf57341aff9b97c0cbd22f8409a271e2f"}, - {file = "psycopg_binary-3.1.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b246fed629482b06f938b23e9281c4af592329daa3ec2cd4a6841ccbfdeb4d68"}, - {file = "psycopg_binary-3.1.9-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:90787ac05b932c0fc678cbf470ccea9c385b8077583f0490136b4569ed3fb652"}, - {file = "psycopg_binary-3.1.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9c114f678e8f4a96530fa79cfd84f65f26358ecfc6cca70cfa2d5e3ae5ef217a"}, - {file = "psycopg_binary-3.1.9-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3a82e77400d1ef6c5bbcf3e600e8bdfacf1a554512f96c090c43ceca3d1ce3b6"}, - {file = "psycopg_binary-3.1.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c7d990f14a37345ca05a5192cd5ac938c9cbedca9c929872af6ae311158feb0e"}, - {file = "psycopg_binary-3.1.9-cp38-cp38-win_amd64.whl", hash = "sha256:e0ca74fd85718723bb9f08e0c6898e901a0c365aef20b3c3a4ef8709125d6210"}, - {file = "psycopg_binary-3.1.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ce8f4dea5934aa6c4933e559c74bef4beb3413f51fbcf17f306ce890216ac33a"}, - {file = "psycopg_binary-3.1.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f41a9e0de4db194c053bcc7c00c35422a4d19d92a8187e8065b1c560626efe35"}, - {file = "psycopg_binary-3.1.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f94a7985135e084e122b143956c6f589d17aef743ecd0a434a3d3a222631d5a"}, - {file = "psycopg_binary-3.1.9-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bb86d58b90faefdc0bbedf08fdea4cc2afcb1cfa4340f027d458bfd01d8b812"}, - {file = "psycopg_binary-3.1.9-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c696dc84f9ff155761df15779181d8e4af7746b98908e130add8259912e4bb7"}, - {file = "psycopg_binary-3.1.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4213953da44324850c8f789301cf665f46fb94301ba403301e7af58546c3a428"}, - {file = "psycopg_binary-3.1.9-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:25e3ce947aaaa1bd9f1920fca76d7281660646304f9ea5bc036b201dd8790655"}, - {file = "psycopg_binary-3.1.9-cp39-cp39-musllinux_1_1_i686.whl", hash 
= "sha256:9c75be2a9b986139e3ff6bc0a2852081ac00811040f9b82d3aa539821311122e"}, - {file = "psycopg_binary-3.1.9-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:63e8d1dbe253657c70dbfa9c59423f4654d82698fc5ed6868b8dc0765abe20b6"}, - {file = "psycopg_binary-3.1.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f4da4ca9b2365fc1d3fc741c3bbd3efccd892ce813444b884c8911a1acf1c932"}, - {file = "psycopg_binary-3.1.9-cp39-cp39-win_amd64.whl", hash = "sha256:c0b8d6bbeff1dba760a208d8bc205a05b745e6cee02b839f969f72cf56a8b80d"}, + {file = "psycopg_binary-3.1.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a529c203f6e0f4c67ba27cf8f9739eb3bc880ad70d6ad6c0e56c2230a66b5a09"}, + {file = "psycopg_binary-3.1.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bd6e14d1aeb12754a43446c77a5ce819b68875cc25ae6538089ef90d7f6dd6f7"}, + {file = "psycopg_binary-3.1.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1583ced5948cf88124212c4503dfe5b01ac3e2dd1a2833c083917f4c4aabe8b4"}, + {file = "psycopg_binary-3.1.10-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2098721c486478987be700723b28ec7a48f134eba339de36af0e745f37dfe461"}, + {file = "psycopg_binary-3.1.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7e61f7b412fca7b15dd043a0b22fd528d2ed8276e76b3764c3889e29fa65082b"}, + {file = "psycopg_binary-3.1.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0f33e33a072e3d5af51ee4d4a439e10dbe623fe87ef295d5d688180d529f13f"}, + {file = "psycopg_binary-3.1.10-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f6f7738c59262d8d19154164d99c881ed58ed377fb6f1d685eb0dc43bbcd8022"}, + {file = "psycopg_binary-3.1.10-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:511d38b1e1961d179d47d5103ba9634ecfc7ead431d19a9337ef82f3a2bca807"}, + {file = "psycopg_binary-3.1.10-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:666e7acf2ffdb5e8a58e8b0c1759facdb9688c7e90ee8ca7aed675803b57404d"}, + {file = "psycopg_binary-3.1.10-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:57b93c756fee5f7c7bd580c34cd5d244f7d5638f8b2cf25333f97b9b8b2ebfd1"}, + {file = "psycopg_binary-3.1.10-cp310-cp310-win_amd64.whl", hash = "sha256:a1d61b7724c7215a8ea4495a5c6b704656f4b7bb6165f4cb9989b685886ebc48"}, + {file = "psycopg_binary-3.1.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:36fff836a7823c9d71fa7faa333c74b2b081af216cebdbb0f481dce55ee2d974"}, + {file = "psycopg_binary-3.1.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:32caf98cb00881bfcbbbae39a15f2a4e08b79ff983f1c0f13b60a888ef6e8431"}, + {file = "psycopg_binary-3.1.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5565a6a86fee8d74f30de89e07f399567cdf59367aeb09624eb690d524339076"}, + {file = "psycopg_binary-3.1.10-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9fb0d64520b29bd80a6731476ad8e1c20348dfdee00ab098899d23247b641675"}, + {file = "psycopg_binary-3.1.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfc05ed4e74fa8615d7cc2bd57f00f97662f4e865a731dbd43da9a527e289c8c"}, + {file = "psycopg_binary-3.1.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5b59c8cff887757ddf438ff9489d79c5e6b717112c96f5c68e16f367ff8724e"}, + {file = "psycopg_binary-3.1.10-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a4cbaf12361136afefc5faab21a174a437e71c803b083f410e5140c7605bc66b"}, + {file = "psycopg_binary-3.1.10-cp311-cp311-musllinux_1_1_i686.whl", hash = 
"sha256:ff72576061c774bcce5f5440b93e63d4c430032dd056d30f6cb1988e549dd92c"}, + {file = "psycopg_binary-3.1.10-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:a4e91e1a8d61c60f592a1dfcebdf55e52a29fe4fdb650c5bd5414c848e77d029"}, + {file = "psycopg_binary-3.1.10-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f7187269d825e84c945be7d93dd5088a4e0b6481a4bdaba3bf7069d4ac13703d"}, + {file = "psycopg_binary-3.1.10-cp311-cp311-win_amd64.whl", hash = "sha256:ba7812a593c16d9d661844dc8dd4d81548fd1c2a0ee676f3e3d8638369f4c5e4"}, + {file = "psycopg_binary-3.1.10-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:88caa5859740507b3596c6c2e00ceaccee2c6ab5317bc535887801ad3cc7f3e1"}, + {file = "psycopg_binary-3.1.10-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a3a7e99ba10c2e83a48d79431560e0d5ca7865f68f2bac3a462dc2b151e9926"}, + {file = "psycopg_binary-3.1.10-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:848f4f4707dc73f4b4e844c92f3de795b2ddb728f75132602bda5e6ba55084fc"}, + {file = "psycopg_binary-3.1.10-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:415961e839bb49cfd75cd961503fb8846c0768f247db1fa7171c1ac61d38711b"}, + {file = "psycopg_binary-3.1.10-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0471869e658d0c6b8c3ed53153794739c18d7dad2dd5b8e6ff023a364c20f7df"}, + {file = "psycopg_binary-3.1.10-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:4290060ee0d856caa979ecf675c0e6959325f508272ccf27f64c3801c7bcbde7"}, + {file = "psycopg_binary-3.1.10-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:abf04bc06c8f6a1ac3dc2106d3b79c8661352e9d8a57ca2934ffa6aae8fe600a"}, + {file = "psycopg_binary-3.1.10-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:51fe70708243b83bf16710d8c11b61bd46562e6a24a6300d5434380b35911059"}, + {file = "psycopg_binary-3.1.10-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8b658f7f8b49fb60a1c52e3f6692f690a85bdf1ad30aafe0f3f1fd74f6958cf8"}, + {file = "psycopg_binary-3.1.10-cp37-cp37m-win_amd64.whl", hash = "sha256:ffc8c796194f23b9b07f6d25f927ec4df84a194bbc7a1f9e73316734eef512f9"}, + {file = "psycopg_binary-3.1.10-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:74ce92122be34cf0e5f06d79869e1001c8421a68fa7ddf6fe38a717155cf3a64"}, + {file = "psycopg_binary-3.1.10-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:75608a900984061c8898be68fbddc6f3da5eefdffce6e0624f5371645740d172"}, + {file = "psycopg_binary-3.1.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6670d160d054466e8fdedfbc749ef8bf7dfdf69296048954d24645dd4d3d3c01"}, + {file = "psycopg_binary-3.1.10-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d32026cfab7ba7ac687a42c33345026a2fb6fc5608a6144077f767af4386be0b"}, + {file = "psycopg_binary-3.1.10-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:908fa388a5b75dfd17a937acb24708bd272e21edefca9a495004c6f70ec2636a"}, + {file = "psycopg_binary-3.1.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e46b97073bd4de114f475249d681eaf054e950699c5d7af554d3684db39b82d"}, + {file = "psycopg_binary-3.1.10-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9cf56bb4b115def3a18157f3b3b7d8322ee94a8dea30028db602c8f9ae34ad1e"}, + {file = "psycopg_binary-3.1.10-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3b6c6f90241c4c5a6ca3f0d8827e37ef90fdc4deb9d8cfa5678baa0ea374b391"}, + {file = "psycopg_binary-3.1.10-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = 
"sha256:747176a6aeb058079f56c5397bd90339581ab7b3cc0d62e7445654e6a484c7e1"}, + {file = "psycopg_binary-3.1.10-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:41a415e78c457b06497fa0084e4ea7245ca1a377b55756dd757034210b64da7e"}, + {file = "psycopg_binary-3.1.10-cp38-cp38-win_amd64.whl", hash = "sha256:a7bbe9017edd898d7b3a8747700ed045dda96a907dff87f45e642e28d8584481"}, + {file = "psycopg_binary-3.1.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0f062f20256708929a58c41d44f350efced4c00a603323d1413f6dc0b84d95a5"}, + {file = "psycopg_binary-3.1.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dea30f2704337ca2d0322fccfe1fa30f61ce9185de3937eb986321063114a51f"}, + {file = "psycopg_binary-3.1.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9d88ac72531034ebf7ec09114e732b066a9078f4ce213cf65cc5e42eb538d30"}, + {file = "psycopg_binary-3.1.10-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2bea0940d69c3e24a72530730952687912893b34c53aa39e79045e7b446174d"}, + {file = "psycopg_binary-3.1.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6a691dc8e2436d9c1e5cf93902d63e9501688fccc957eb22f952d37886257470"}, + {file = "psycopg_binary-3.1.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa92661f99351765673835a4d936d79bd24dfbb358b29b084d83be38229a90e4"}, + {file = "psycopg_binary-3.1.10-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:30eb731ed5525d8df892db6532cc8ffd8a163b73bc355127dee9c49334e16eee"}, + {file = "psycopg_binary-3.1.10-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:50bf7a59d3a85a82d466fed341d352b44d09d6adc18656101d163a7cfc6509a0"}, + {file = "psycopg_binary-3.1.10-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f48665947c55f8d6eb3f0be98de80411508e1ec329f354685329b57fced82c7f"}, + {file = "psycopg_binary-3.1.10-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:caa771569da01fc0389ca34920c331a284425a68f92d1ba0a80cc08935f8356e"}, + {file = "psycopg_binary-3.1.10-cp39-cp39-win_amd64.whl", hash = "sha256:b30887e631fd67affaed98f6cd2135b44f2d1a6d9bca353a69c3889c78bd7aa8"}, ] [[package]] name = "psycopg2-binary" -version = "2.9.6" +version = "2.9.7" description = "psycopg2 - Python-PostgreSQL Database Adapter" optional = false python-versions = ">=3.6" files = [ - {file = "psycopg2-binary-2.9.6.tar.gz", hash = "sha256:1f64dcfb8f6e0c014c7f55e51c9759f024f70ea572fbdef123f85318c297947c"}, - {file = "psycopg2_binary-2.9.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d26e0342183c762de3276cca7a530d574d4e25121ca7d6e4a98e4f05cb8e4df7"}, - {file = "psycopg2_binary-2.9.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c48d8f2db17f27d41fb0e2ecd703ea41984ee19362cbce52c097963b3a1b4365"}, - {file = "psycopg2_binary-2.9.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffe9dc0a884a8848075e576c1de0290d85a533a9f6e9c4e564f19adf8f6e54a7"}, - {file = "psycopg2_binary-2.9.6-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8a76e027f87753f9bd1ab5f7c9cb8c7628d1077ef927f5e2446477153a602f2c"}, - {file = "psycopg2_binary-2.9.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6460c7a99fc939b849431f1e73e013d54aa54293f30f1109019c56a0b2b2ec2f"}, - {file = "psycopg2_binary-2.9.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae102a98c547ee2288637af07393dd33f440c25e5cd79556b04e3fca13325e5f"}, - {file = "psycopg2_binary-2.9.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:9972aad21f965599ed0106f65334230ce826e5ae69fda7cbd688d24fa922415e"}, - {file = "psycopg2_binary-2.9.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7a40c00dbe17c0af5bdd55aafd6ff6679f94a9be9513a4c7e071baf3d7d22a70"}, - {file = "psycopg2_binary-2.9.6-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:cacbdc5839bdff804dfebc058fe25684cae322987f7a38b0168bc1b2df703fb1"}, - {file = "psycopg2_binary-2.9.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7f0438fa20fb6c7e202863e0d5ab02c246d35efb1d164e052f2f3bfe2b152bd0"}, - {file = "psycopg2_binary-2.9.6-cp310-cp310-win32.whl", hash = "sha256:b6c8288bb8a84b47e07013bb4850f50538aa913d487579e1921724631d02ea1b"}, - {file = "psycopg2_binary-2.9.6-cp310-cp310-win_amd64.whl", hash = "sha256:61b047a0537bbc3afae10f134dc6393823882eb263088c271331602b672e52e9"}, - {file = "psycopg2_binary-2.9.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:964b4dfb7c1c1965ac4c1978b0f755cc4bd698e8aa2b7667c575fb5f04ebe06b"}, - {file = "psycopg2_binary-2.9.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:afe64e9b8ea66866a771996f6ff14447e8082ea26e675a295ad3bdbffdd72afb"}, - {file = "psycopg2_binary-2.9.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15e2ee79e7cf29582ef770de7dab3d286431b01c3bb598f8e05e09601b890081"}, - {file = "psycopg2_binary-2.9.6-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dfa74c903a3c1f0d9b1c7e7b53ed2d929a4910e272add6700c38f365a6002820"}, - {file = "psycopg2_binary-2.9.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b83456c2d4979e08ff56180a76429263ea254c3f6552cd14ada95cff1dec9bb8"}, - {file = "psycopg2_binary-2.9.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0645376d399bfd64da57148694d78e1f431b1e1ee1054872a5713125681cf1be"}, - {file = "psycopg2_binary-2.9.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e99e34c82309dd78959ba3c1590975b5d3c862d6f279f843d47d26ff89d7d7e1"}, - {file = "psycopg2_binary-2.9.6-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4ea29fc3ad9d91162c52b578f211ff1c931d8a38e1f58e684c45aa470adf19e2"}, - {file = "psycopg2_binary-2.9.6-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:4ac30da8b4f57187dbf449294d23b808f8f53cad6b1fc3623fa8a6c11d176dd0"}, - {file = "psycopg2_binary-2.9.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e78e6e2a00c223e164c417628572a90093c031ed724492c763721c2e0bc2a8df"}, - {file = "psycopg2_binary-2.9.6-cp311-cp311-win32.whl", hash = "sha256:1876843d8e31c89c399e31b97d4b9725a3575bb9c2af92038464231ec40f9edb"}, - {file = "psycopg2_binary-2.9.6-cp311-cp311-win_amd64.whl", hash = "sha256:b4b24f75d16a89cc6b4cdff0eb6a910a966ecd476d1e73f7ce5985ff1328e9a6"}, - {file = "psycopg2_binary-2.9.6-cp36-cp36m-win32.whl", hash = "sha256:498807b927ca2510baea1b05cc91d7da4718a0f53cb766c154c417a39f1820a0"}, - {file = "psycopg2_binary-2.9.6-cp36-cp36m-win_amd64.whl", hash = "sha256:0d236c2825fa656a2d98bbb0e52370a2e852e5a0ec45fc4f402977313329174d"}, - {file = "psycopg2_binary-2.9.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:34b9ccdf210cbbb1303c7c4db2905fa0319391bd5904d32689e6dd5c963d2ea8"}, - {file = "psycopg2_binary-2.9.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84d2222e61f313c4848ff05353653bf5f5cf6ce34df540e4274516880d9c3763"}, - {file = "psycopg2_binary-2.9.6-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30637a20623e2a2eacc420059be11527f4458ef54352d870b8181a4c3020ae6b"}, - {file = 
"psycopg2_binary-2.9.6-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8122cfc7cae0da9a3077216528b8bb3629c43b25053284cc868744bfe71eb141"}, - {file = "psycopg2_binary-2.9.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38601cbbfe600362c43714482f43b7c110b20cb0f8172422c616b09b85a750c5"}, - {file = "psycopg2_binary-2.9.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c7e62ab8b332147a7593a385d4f368874d5fe4ad4e341770d4983442d89603e3"}, - {file = "psycopg2_binary-2.9.6-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2ab652e729ff4ad76d400df2624d223d6e265ef81bb8aa17fbd63607878ecbee"}, - {file = "psycopg2_binary-2.9.6-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:c83a74b68270028dc8ee74d38ecfaf9c90eed23c8959fca95bd703d25b82c88e"}, - {file = "psycopg2_binary-2.9.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d4e6036decf4b72d6425d5b29bbd3e8f0ff1059cda7ac7b96d6ac5ed34ffbacd"}, - {file = "psycopg2_binary-2.9.6-cp37-cp37m-win32.whl", hash = "sha256:a8c28fd40a4226b4a84bdf2d2b5b37d2c7bd49486b5adcc200e8c7ec991dfa7e"}, - {file = "psycopg2_binary-2.9.6-cp37-cp37m-win_amd64.whl", hash = "sha256:51537e3d299be0db9137b321dfb6a5022caaab275775680e0c3d281feefaca6b"}, - {file = "psycopg2_binary-2.9.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cf4499e0a83b7b7edcb8dabecbd8501d0d3a5ef66457200f77bde3d210d5debb"}, - {file = "psycopg2_binary-2.9.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7e13a5a2c01151f1208d5207e42f33ba86d561b7a89fca67c700b9486a06d0e2"}, - {file = "psycopg2_binary-2.9.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e0f754d27fddcfd74006455b6e04e6705d6c31a612ec69ddc040a5468e44b4e"}, - {file = "psycopg2_binary-2.9.6-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d57c3fd55d9058645d26ae37d76e61156a27722097229d32a9e73ed54819982a"}, - {file = "psycopg2_binary-2.9.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:71f14375d6f73b62800530b581aed3ada394039877818b2d5f7fc77e3bb6894d"}, - {file = "psycopg2_binary-2.9.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:441cc2f8869a4f0f4bb408475e5ae0ee1f3b55b33f350406150277f7f35384fc"}, - {file = "psycopg2_binary-2.9.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:65bee1e49fa6f9cf327ce0e01c4c10f39165ee76d35c846ade7cb0ec6683e303"}, - {file = "psycopg2_binary-2.9.6-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:af335bac6b666cc6aea16f11d486c3b794029d9df029967f9938a4bed59b6a19"}, - {file = "psycopg2_binary-2.9.6-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:cfec476887aa231b8548ece2e06d28edc87c1397ebd83922299af2e051cf2827"}, - {file = "psycopg2_binary-2.9.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:65c07febd1936d63bfde78948b76cd4c2a411572a44ac50719ead41947d0f26b"}, - {file = "psycopg2_binary-2.9.6-cp38-cp38-win32.whl", hash = "sha256:4dfb4be774c4436a4526d0c554af0cc2e02082c38303852a36f6456ece7b3503"}, - {file = "psycopg2_binary-2.9.6-cp38-cp38-win_amd64.whl", hash = "sha256:02c6e3cf3439e213e4ee930308dc122d6fb4d4bea9aef4a12535fbd605d1a2fe"}, - {file = "psycopg2_binary-2.9.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e9182eb20f41417ea1dd8e8f7888c4d7c6e805f8a7c98c1081778a3da2bee3e4"}, - {file = "psycopg2_binary-2.9.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8a6979cf527e2603d349a91060f428bcb135aea2be3201dff794813256c274f1"}, - {file = "psycopg2_binary-2.9.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8338a271cb71d8da40b023a35d9c1e919eba6cbd8fa20a54b748a332c355d896"}, - {file = "psycopg2_binary-2.9.6-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e3ed340d2b858d6e6fb5083f87c09996506af483227735de6964a6100b4e6a54"}, - {file = "psycopg2_binary-2.9.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f81e65376e52f03422e1fb475c9514185669943798ed019ac50410fb4c4df232"}, - {file = "psycopg2_binary-2.9.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfb13af3c5dd3a9588000910178de17010ebcccd37b4f9794b00595e3a8ddad3"}, - {file = "psycopg2_binary-2.9.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4c727b597c6444a16e9119386b59388f8a424223302d0c06c676ec8b4bc1f963"}, - {file = "psycopg2_binary-2.9.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4d67fbdaf177da06374473ef6f7ed8cc0a9dc640b01abfe9e8a2ccb1b1402c1f"}, - {file = "psycopg2_binary-2.9.6-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0892ef645c2fabb0c75ec32d79f4252542d0caec1d5d949630e7d242ca4681a3"}, - {file = "psycopg2_binary-2.9.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:02c0f3757a4300cf379eb49f543fb7ac527fb00144d39246ee40e1df684ab514"}, - {file = "psycopg2_binary-2.9.6-cp39-cp39-win32.whl", hash = "sha256:c3dba7dab16709a33a847e5cd756767271697041fbe3fe97c215b1fc1f5c9848"}, - {file = "psycopg2_binary-2.9.6-cp39-cp39-win_amd64.whl", hash = "sha256:f6a88f384335bb27812293fdb11ac6aee2ca3f51d3c7820fe03de0a304ab6249"}, + {file = "psycopg2-binary-2.9.7.tar.gz", hash = "sha256:1b918f64a51ffe19cd2e230b3240ba481330ce1d4b7875ae67305bd1d37b041c"}, + {file = "psycopg2_binary-2.9.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ea5f8ee87f1eddc818fc04649d952c526db4426d26bab16efbe5a0c52b27d6ab"}, + {file = "psycopg2_binary-2.9.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2993ccb2b7e80844d534e55e0f12534c2871952f78e0da33c35e648bf002bbff"}, + {file = "psycopg2_binary-2.9.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dbbc3c5d15ed76b0d9db7753c0db40899136ecfe97d50cbde918f630c5eb857a"}, + {file = "psycopg2_binary-2.9.7-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:692df8763b71d42eb8343f54091368f6f6c9cfc56dc391858cdb3c3ef1e3e584"}, + {file = "psycopg2_binary-2.9.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9dcfd5d37e027ec393a303cc0a216be564b96c80ba532f3d1e0d2b5e5e4b1e6e"}, + {file = "psycopg2_binary-2.9.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17cc17a70dfb295a240db7f65b6d8153c3d81efb145d76da1e4a096e9c5c0e63"}, + {file = "psycopg2_binary-2.9.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e5666632ba2b0d9757b38fc17337d84bdf932d38563c5234f5f8c54fd01349c9"}, + {file = "psycopg2_binary-2.9.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7db7b9b701974c96a88997d458b38ccb110eba8f805d4b4f74944aac48639b42"}, + {file = "psycopg2_binary-2.9.7-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c82986635a16fb1fa15cd5436035c88bc65c3d5ced1cfaac7f357ee9e9deddd4"}, + {file = "psycopg2_binary-2.9.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4fe13712357d802080cfccbf8c6266a3121dc0e27e2144819029095ccf708372"}, + {file = "psycopg2_binary-2.9.7-cp310-cp310-win32.whl", hash = "sha256:122641b7fab18ef76b18860dd0c772290566b6fb30cc08e923ad73d17461dc63"}, + {file = "psycopg2_binary-2.9.7-cp310-cp310-win_amd64.whl", hash = "sha256:f8651cf1f144f9ee0fa7d1a1df61a9184ab72962531ca99f077bbdcba3947c58"}, + {file = 
"psycopg2_binary-2.9.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4ecc15666f16f97709106d87284c136cdc82647e1c3f8392a672616aed3c7151"}, + {file = "psycopg2_binary-2.9.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3fbb1184c7e9d28d67671992970718c05af5f77fc88e26fd7136613c4ece1f89"}, + {file = "psycopg2_binary-2.9.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a7968fd20bd550431837656872c19575b687f3f6f98120046228e451e4064df"}, + {file = "psycopg2_binary-2.9.7-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:094af2e77a1976efd4956a031028774b827029729725e136514aae3cdf49b87b"}, + {file = "psycopg2_binary-2.9.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:26484e913d472ecb6b45937ea55ce29c57c662066d222fb0fbdc1fab457f18c5"}, + {file = "psycopg2_binary-2.9.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f309b77a7c716e6ed9891b9b42953c3ff7d533dc548c1e33fddc73d2f5e21f9"}, + {file = "psycopg2_binary-2.9.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6d92e139ca388ccfe8c04aacc163756e55ba4c623c6ba13d5d1595ed97523e4b"}, + {file = "psycopg2_binary-2.9.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:2df562bb2e4e00ee064779902d721223cfa9f8f58e7e52318c97d139cf7f012d"}, + {file = "psycopg2_binary-2.9.7-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:4eec5d36dbcfc076caab61a2114c12094c0b7027d57e9e4387b634e8ab36fd44"}, + {file = "psycopg2_binary-2.9.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1011eeb0c51e5b9ea1016f0f45fa23aca63966a4c0afcf0340ccabe85a9f65bd"}, + {file = "psycopg2_binary-2.9.7-cp311-cp311-win32.whl", hash = "sha256:ded8e15f7550db9e75c60b3d9fcbc7737fea258a0f10032cdb7edc26c2a671fd"}, + {file = "psycopg2_binary-2.9.7-cp311-cp311-win_amd64.whl", hash = "sha256:8a136c8aaf6615653450817a7abe0fc01e4ea720ae41dfb2823eccae4b9062a3"}, + {file = "psycopg2_binary-2.9.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2dec5a75a3a5d42b120e88e6ed3e3b37b46459202bb8e36cd67591b6e5feebc1"}, + {file = "psycopg2_binary-2.9.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc10da7e7df3380426521e8c1ed975d22df678639da2ed0ec3244c3dc2ab54c8"}, + {file = "psycopg2_binary-2.9.7-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee919b676da28f78f91b464fb3e12238bd7474483352a59c8a16c39dfc59f0c5"}, + {file = "psycopg2_binary-2.9.7-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb1c0e682138f9067a58fc3c9a9bf1c83d8e08cfbee380d858e63196466d5c86"}, + {file = "psycopg2_binary-2.9.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00d8db270afb76f48a499f7bb8fa70297e66da67288471ca873db88382850bf4"}, + {file = "psycopg2_binary-2.9.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:9b0c2b466b2f4d89ccc33784c4ebb1627989bd84a39b79092e560e937a11d4ac"}, + {file = "psycopg2_binary-2.9.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:51d1b42d44f4ffb93188f9b39e6d1c82aa758fdb8d9de65e1ddfe7a7d250d7ad"}, + {file = "psycopg2_binary-2.9.7-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:11abdbfc6f7f7dea4a524b5f4117369b0d757725798f1593796be6ece20266cb"}, + {file = "psycopg2_binary-2.9.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:f02f4a72cc3ab2565c6d9720f0343cb840fb2dc01a2e9ecb8bc58ccf95dc5c06"}, + {file = "psycopg2_binary-2.9.7-cp37-cp37m-win32.whl", hash = "sha256:81d5dd2dd9ab78d31a451e357315f201d976c131ca7d43870a0e8063b6b7a1ec"}, + {file = 
"psycopg2_binary-2.9.7-cp37-cp37m-win_amd64.whl", hash = "sha256:62cb6de84d7767164a87ca97e22e5e0a134856ebcb08f21b621c6125baf61f16"}, + {file = "psycopg2_binary-2.9.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:59f7e9109a59dfa31efa022e94a244736ae401526682de504e87bd11ce870c22"}, + {file = "psycopg2_binary-2.9.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:95a7a747bdc3b010bb6a980f053233e7610276d55f3ca506afff4ad7749ab58a"}, + {file = "psycopg2_binary-2.9.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c721ee464e45ecf609ff8c0a555018764974114f671815a0a7152aedb9f3343"}, + {file = "psycopg2_binary-2.9.7-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4f37bbc6588d402980ffbd1f3338c871368fb4b1cfa091debe13c68bb3852b3"}, + {file = "psycopg2_binary-2.9.7-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac83ab05e25354dad798401babaa6daa9577462136ba215694865394840e31f8"}, + {file = "psycopg2_binary-2.9.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:024eaeb2a08c9a65cd5f94b31ace1ee3bb3f978cd4d079406aef85169ba01f08"}, + {file = "psycopg2_binary-2.9.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1c31c2606ac500dbd26381145684d87730a2fac9a62ebcfbaa2b119f8d6c19f4"}, + {file = "psycopg2_binary-2.9.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:42a62ef0e5abb55bf6ffb050eb2b0fcd767261fa3faf943a4267539168807522"}, + {file = "psycopg2_binary-2.9.7-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:7952807f95c8eba6a8ccb14e00bf170bb700cafcec3924d565235dffc7dc4ae8"}, + {file = "psycopg2_binary-2.9.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e02bc4f2966475a7393bd0f098e1165d470d3fa816264054359ed4f10f6914ea"}, + {file = "psycopg2_binary-2.9.7-cp38-cp38-win32.whl", hash = "sha256:fdca0511458d26cf39b827a663d7d87db6f32b93efc22442a742035728603d5f"}, + {file = "psycopg2_binary-2.9.7-cp38-cp38-win_amd64.whl", hash = "sha256:d0b16e5bb0ab78583f0ed7ab16378a0f8a89a27256bb5560402749dbe8a164d7"}, + {file = "psycopg2_binary-2.9.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6822c9c63308d650db201ba22fe6648bd6786ca6d14fdaf273b17e15608d0852"}, + {file = "psycopg2_binary-2.9.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f94cb12150d57ea433e3e02aabd072205648e86f1d5a0a692d60242f7809b15"}, + {file = "psycopg2_binary-2.9.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5ee89587696d808c9a00876065d725d4ae606f5f7853b961cdbc348b0f7c9a1"}, + {file = "psycopg2_binary-2.9.7-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ad5ec10b53cbb57e9a2e77b67e4e4368df56b54d6b00cc86398578f1c635f329"}, + {file = "psycopg2_binary-2.9.7-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:642df77484b2dcaf87d4237792246d8068653f9e0f5c025e2c692fc56b0dda70"}, + {file = "psycopg2_binary-2.9.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6a8b575ac45af1eaccbbcdcf710ab984fd50af048fe130672377f78aaff6fc1"}, + {file = "psycopg2_binary-2.9.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f955aa50d7d5220fcb6e38f69ea126eafecd812d96aeed5d5f3597f33fad43bb"}, + {file = "psycopg2_binary-2.9.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ad26d4eeaa0d722b25814cce97335ecf1b707630258f14ac4d2ed3d1d8415265"}, + {file = "psycopg2_binary-2.9.7-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:ced63c054bdaf0298f62681d5dcae3afe60cbae332390bfb1acf0e23dcd25fc8"}, + {file = 
"psycopg2_binary-2.9.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2b04da24cbde33292ad34a40db9832a80ad12de26486ffeda883413c9e1b1d5e"}, + {file = "psycopg2_binary-2.9.7-cp39-cp39-win32.whl", hash = "sha256:18f12632ab516c47c1ac4841a78fddea6508a8284c7cf0f292cb1a523f2e2379"}, + {file = "psycopg2_binary-2.9.7-cp39-cp39-win_amd64.whl", hash = "sha256:eb3b8d55924a6058a26db69fb1d3e7e32695ff8b491835ba9f479537e14dcf9f"}, ] [[package]] @@ -4975,13 +5038,13 @@ email = ["email-validator (>=1.0.3)"] [[package]] name = "pygments" -version = "2.15.1" +version = "2.16.1" description = "Pygments is a syntax highlighting package written in Python." optional = false python-versions = ">=3.7" files = [ - {file = "Pygments-2.15.1-py3-none-any.whl", hash = "sha256:db2db3deb4b4179f399a09054b023b6a586b76499d36965813c71aa8ed7b5fd1"}, - {file = "Pygments-2.15.1.tar.gz", hash = "sha256:8ace4d3c1dd481894b2005f560ead0f9f19ee64fe983366be1a21e171d12775c"}, + {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"}, + {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"}, ] [package.extras] @@ -5108,23 +5171,23 @@ diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pypdf" -version = "3.14.0" +version = "3.15.0" description = "A pure-python PDF library capable of splitting, merging, cropping, and transforming PDF files" optional = false python-versions = ">=3.6" files = [ - {file = "pypdf-3.14.0-py3-none-any.whl", hash = "sha256:55a5943d9a598ff6b9d301acf8fa33303656a1ea86fd3d754c6d20d417636c6f"}, - {file = "pypdf-3.14.0.tar.gz", hash = "sha256:1fb4edffa5d3a448f964d0ad2a31cd8e408ea5d76d45efac042a8c3448c83b0a"}, + {file = "pypdf-3.15.0-py3-none-any.whl", hash = "sha256:2e29ddb62561ec91157c784783714703ddd3ce08f070ecbc57404fb86cd9fc97"}, + {file = "pypdf-3.15.0.tar.gz", hash = "sha256:8a6264e1c47c63dc2484e29bdfa76b121435896a84e94b7c5ae82c6ae96354bb"}, ] [package.dependencies] typing_extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} [package.extras] -crypto = ["PyCryptodome"] +crypto = ["PyCryptodome", "cryptography"] dev = ["black", "flit", "pip-tools", "pre-commit (<2.18.0)", "pytest-cov", "pytest-socket", "wheel"] docs = ["myst_parser", "sphinx", "sphinx_rtd_theme"] -full = ["Pillow (>=8.0.0)", "PyCryptodome"] +full = ["Pillow (>=8.0.0)", "PyCryptodome", "cryptography"] image = ["Pillow (>=8.0.0)"] [[package]] @@ -5528,13 +5591,13 @@ cffi = {version = "*", markers = "implementation_name == \"pypy\""} [[package]] name = "qdrant-client" -version = "1.3.2" +version = "1.4.0" description = "Client library for the Qdrant vector search engine" optional = false python-versions = ">=3.7,<3.12" files = [ - {file = "qdrant_client-1.3.2-py3-none-any.whl", hash = "sha256:66a076016fb9d33bec8170e96516d7e4a0ee5c611824cc9be18590ffeb3cf9aa"}, - {file = "qdrant_client-1.3.2.tar.gz", hash = "sha256:6638c9eac027f2c0fdb1f63c3bd7b403fe8c3f73cb1f21fd15fd60f71012d537"}, + {file = "qdrant_client-1.4.0-py3-none-any.whl", hash = "sha256:2f9e563955b5163da98016f2ed38d9aea5058576c7c5844e9aa205d28155f56d"}, + {file = "qdrant_client-1.4.0.tar.gz", hash = "sha256:2e54f5a80eb1e7e67f4603b76365af4817af15fb3d0c0f44de4fd93afbbe5537"}, ] [package.dependencies] @@ -5790,59 +5853,49 @@ files = [ [[package]] name = "safetensors" -version = "0.3.1" +version = "0.3.2" description = "Fast and Safe Tensor serialization" optional = true python-versions = "*" files = [ - {file = 
"safetensors-0.3.1-cp310-cp310-macosx_10_11_x86_64.whl", hash = "sha256:2ae9b7dd268b4bae6624729dac86deb82104820e9786429b0583e5168db2f770"}, - {file = "safetensors-0.3.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:08c85c1934682f1e2cd904d38433b53cd2a98245a7cc31f5689f9322a2320bbf"}, - {file = "safetensors-0.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba625c7af9e1c5d0d91cb83d2fba97d29ea69d4db2015d9714d24c7f6d488e15"}, - {file = "safetensors-0.3.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b57d5890c619ec10d9f1b6426b8690d0c9c2868a90dc52f13fae6f6407ac141f"}, - {file = "safetensors-0.3.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c9f562ea696d50b95cadbeb1716dc476714a87792ffe374280c0835312cbfe2"}, - {file = "safetensors-0.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c115951b3a865ece8d98ee43882f2fd0a999c0200d6e6fec24134715ebe3b57"}, - {file = "safetensors-0.3.1-cp310-cp310-win32.whl", hash = "sha256:118f8f7503ea312fc7af27e934088a1b589fb1eff5a7dea2cd1de6c71ee33391"}, - {file = "safetensors-0.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:54846eaae25fded28a7bebbb66be563cad221b4c80daee39e2f55df5e5e0266f"}, - {file = "safetensors-0.3.1-cp311-cp311-macosx_10_11_universal2.whl", hash = "sha256:5af82e10946c4822506db0f29269f43147e889054704dde994d4e22f0c37377b"}, - {file = "safetensors-0.3.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:626c86dd1d930963c8ea7f953a3787ae85322551e3a5203ac731d6e6f3e18f44"}, - {file = "safetensors-0.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12e30677e6af1f4cc4f2832546e91dbb3b0aa7d575bfa473d2899d524e1ace08"}, - {file = "safetensors-0.3.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d534b80bc8d39945bb902f34b0454773971fe9e5e1f2142af451759d7e52b356"}, - {file = "safetensors-0.3.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ddd0ddd502cf219666e7d30f23f196cb87e829439b52b39f3e7da7918c3416df"}, - {file = "safetensors-0.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:997a2cc14023713f423e6d16536d55cb16a3d72850f142e05f82f0d4c76d383b"}, - {file = "safetensors-0.3.1-cp311-cp311-win32.whl", hash = "sha256:6ae9ca63d9e22f71ec40550207bd284a60a6b4916ae6ca12c85a8d86bf49e0c3"}, - {file = "safetensors-0.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:62aa7421ca455418423e35029524489480adda53e3f702453580180ecfebe476"}, - {file = "safetensors-0.3.1-cp37-cp37m-macosx_10_11_x86_64.whl", hash = "sha256:6d54b3ed367b6898baab75dfd057c24f36ec64d3938ffff2af981d56bfba2f42"}, - {file = "safetensors-0.3.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:262423aeda91117010f8c607889066028f680fbb667f50cfe6eae96f22f9d150"}, - {file = "safetensors-0.3.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10efe2513a8327fd628cea13167089588acc23093ba132aecfc536eb9a4560fe"}, - {file = "safetensors-0.3.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:689b3d6a7ebce70ee9438267ee55ea89b575c19923876645e927d08757b552fe"}, - {file = "safetensors-0.3.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14cd9a87bc73ce06903e9f8ee8b05b056af6f3c9f37a6bd74997a16ed36ff5f4"}, - {file = "safetensors-0.3.1-cp37-cp37m-win32.whl", hash = "sha256:a77cb39624480d5f143c1cc272184f65a296f573d61629eff5d495d2e0541d3e"}, - {file = 
"safetensors-0.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9eff3190bfbbb52eef729911345c643f875ca4dbb374aa6c559675cfd0ab73db"}, - {file = "safetensors-0.3.1-cp38-cp38-macosx_10_11_x86_64.whl", hash = "sha256:05cbfef76e4daa14796db1bbb52072d4b72a44050c368b2b1f6fd3e610669a89"}, - {file = "safetensors-0.3.1-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:c49061461f4a81e5ec3415070a3f135530834c89cbd6a7db7cd49e3cb9d9864b"}, - {file = "safetensors-0.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22cf7e73ca42974f098ce0cf4dd8918983700b6b07a4c6827d50c8daefca776e"}, - {file = "safetensors-0.3.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:04f909442d6223ff0016cd2e1b2a95ef8039b92a558014627363a2e267213f62"}, - {file = "safetensors-0.3.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2c573c5a0d5d45791ae8c179e26d74aff86e719056591aa7edb3ca7be55bc961"}, - {file = "safetensors-0.3.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6994043b12e717cf2a6ba69077ac41f0d3675b2819734f07f61819e854c622c7"}, - {file = "safetensors-0.3.1-cp38-cp38-win32.whl", hash = "sha256:158ede81694180a0dbba59422bc304a78c054b305df993c0c6e39c6330fa9348"}, - {file = "safetensors-0.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:afdc725beff7121ea8d39a7339f5a6abcb01daa189ea56290b67fe262d56e20f"}, - {file = "safetensors-0.3.1-cp39-cp39-macosx_10_11_x86_64.whl", hash = "sha256:cba910fcc9e5e64d32d62b837388721165e9c7e45d23bc3a38ad57694b77f40d"}, - {file = "safetensors-0.3.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:a4f7dbfe7285573cdaddd85ef6fa84ebbed995d3703ab72d71257944e384612f"}, - {file = "safetensors-0.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54aed0802f9eaa83ca7b1cbb986bfb90b8e2c67b6a4bcfe245627e17dad565d4"}, - {file = "safetensors-0.3.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:34b75a766f3cfc99fd4c33e329b76deae63f5f388e455d863a5d6e99472fca8e"}, - {file = "safetensors-0.3.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a0f31904f35dc14919a145b2d7a2d8842a43a18a629affe678233c4ea90b4af"}, - {file = "safetensors-0.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcf527ecc5f58907fd9031510378105487f318cc91ecdc5aee3c7cc8f46030a8"}, - {file = "safetensors-0.3.1-cp39-cp39-win32.whl", hash = "sha256:e2f083112cf97aa9611e2a05cc170a2795eccec5f6ff837f4565f950670a9d83"}, - {file = "safetensors-0.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:5f4f614b8e8161cd8a9ca19c765d176a82b122fa3d3387b77862145bfe9b4e93"}, - {file = "safetensors-0.3.1.tar.gz", hash = "sha256:571da56ff8d0bec8ae54923b621cda98d36dcef10feb36fd492c4d0c2cd0e869"}, -] - -[package.extras] -all = ["black (==22.3)", "click (==8.0.4)", "flake8 (>=3.8.3)", "flax (>=0.6.3)", "h5py (>=3.7.0)", "huggingface-hub (>=0.12.1)", "isort (>=5.5.4)", "jax (>=0.3.25)", "jaxlib (>=0.3.25)", "numpy (>=1.21.6)", "paddlepaddle (>=2.4.1)", "pytest (>=7.2.0)", "pytest-benchmark (>=4.0.0)", "setuptools-rust (>=1.5.2)", "tensorflow (>=2.11.0)", "torch (>=1.10)"] -dev = ["black (==22.3)", "click (==8.0.4)", "flake8 (>=3.8.3)", "flax (>=0.6.3)", "h5py (>=3.7.0)", "huggingface-hub (>=0.12.1)", "isort (>=5.5.4)", "jax (>=0.3.25)", "jaxlib (>=0.3.25)", "numpy (>=1.21.6)", "paddlepaddle (>=2.4.1)", "pytest (>=7.2.0)", "pytest-benchmark (>=4.0.0)", "setuptools-rust (>=1.5.2)", "tensorflow (>=2.11.0)", "torch (>=1.10)"] + {file = 
"safetensors-0.3.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b6a66989075c2891d743153e8ba9ca84ee7232c8539704488f454199b8b8f84d"}, + {file = "safetensors-0.3.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:670d6bc3a3b377278ce2971fa7c36ebc0a35041c4ea23b9df750a39380800195"}, + {file = "safetensors-0.3.2-cp310-cp310-macosx_13_0_x86_64.whl", hash = "sha256:7f80af7e4ab3188daaff12d43d078da3017a90d732d38d7af4eb08b6ca2198a5"}, + {file = "safetensors-0.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbb44e140bf2aeda98d9dde669dbec15f7b77f96a9274469b91a6cf4bcc5ec3b"}, + {file = "safetensors-0.3.2-cp310-cp310-win32.whl", hash = "sha256:2961c1243fd0da46aa6a1c835305cc4595486f8ac64632a604d0eb5f2de76175"}, + {file = "safetensors-0.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:c813920482c337d1424d306e1b05824a38e3ef94303748a0a287dea7a8c4f805"}, + {file = "safetensors-0.3.2-cp311-cp311-macosx_10_11_universal2.whl", hash = "sha256:707df34bd9b9047e97332136ad98e57028faeccdb9cfe1c3b52aba5964cc24bf"}, + {file = "safetensors-0.3.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:becc5bb85b2947eae20ed23b407ebfd5277d9a560f90381fe2c42e6c043677ba"}, + {file = "safetensors-0.3.2-cp311-cp311-macosx_13_0_universal2.whl", hash = "sha256:54ad6af663e15e2b99e2ea3280981b7514485df72ba6d014dc22dae7ba6a5e6c"}, + {file = "safetensors-0.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ada0fac127ff8fb04834da5c6d85a8077e6a1c9180a11251d96f8068db922a17"}, + {file = "safetensors-0.3.2-cp311-cp311-win32.whl", hash = "sha256:155b82dbe2b0ebff18cde3f76b42b6d9470296e92561ef1a282004d449fa2b4c"}, + {file = "safetensors-0.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:a86428d196959619ce90197731be9391b5098b35100a7228ef4643957648f7f5"}, + {file = "safetensors-0.3.2-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:c1f8ab41ed735c5b581f451fd15d9602ff51aa88044bfa933c5fa4b1d0c644d1"}, + {file = "safetensors-0.3.2-cp37-cp37m-macosx_13_0_x86_64.whl", hash = "sha256:bc9cfb3c9ea2aec89685b4d656f9f2296f0f0d67ecf2bebf950870e3be89b3db"}, + {file = "safetensors-0.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d7d70d48585fe8df00725aa788f2e64fd24a4c9ae07cd6be34f6859d0f89a9c"}, + {file = "safetensors-0.3.2-cp37-cp37m-win32.whl", hash = "sha256:6ff59bc90cdc857f68b1023be9085fda6202bbe7f2fd67d06af8f976d6adcc10"}, + {file = "safetensors-0.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:8b05c93da15fa911763a89281906ca333ed800ab0ef1c7ce53317aa1a2322f19"}, + {file = "safetensors-0.3.2-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:8969cfd9e8d904e8d3c67c989e1bd9a95e3cc8980d4f95e4dcd43c299bb94253"}, + {file = "safetensors-0.3.2-cp38-cp38-macosx_13_0_x86_64.whl", hash = "sha256:f54148ac027556eb02187e9bc1556c4d916c99ca3cb34ca36a7d304d675035c1"}, + {file = "safetensors-0.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa98f49e95f02eb750d32c4947e7d5aa43883149ebd0414920866446525b70f0"}, + {file = "safetensors-0.3.2-cp38-cp38-win32.whl", hash = "sha256:33409df5e28a83dc5cc5547a3ac17c0f1b13a1847b1eb3bc4b3be0df9915171e"}, + {file = "safetensors-0.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:e04a7cbbb3856159ab99e3adb14521544f65fcb8548cce773a1435a0f8d78d27"}, + {file = "safetensors-0.3.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:7c864cf5dcbfb608c5378f83319c60cc9c97263343b57c02756b7613cd5ab4dd"}, + {file = "safetensors-0.3.2-cp39-cp39-macosx_12_0_arm64.whl", hash = 
"sha256:14e8c19d6dc51d4f70ee33c46aff04c8ba3f95812e74daf8036c24bc86e75cae"}, + {file = "safetensors-0.3.2-cp39-cp39-macosx_13_0_x86_64.whl", hash = "sha256:fafd95e5ef41e8f312e2a32b7031f7b9b2a621b255f867b221f94bb2e9f51ae8"}, + {file = "safetensors-0.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87ff0024ef2e5722a79af24688ce4a430f70601d0cf712a744105ed4b8f67ba5"}, + {file = "safetensors-0.3.2-cp39-cp39-win32.whl", hash = "sha256:827af9478b78977248ba93e2fd97ea307fb63f463f80cef4824460f8c2542a52"}, + {file = "safetensors-0.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:9b09f27c456efa301f98681ea14b12f81f2637889f6336223ccab71e42c34541"}, + {file = "safetensors-0.3.2.tar.gz", hash = "sha256:2dbd34554ed3b99435a0e84df077108f5334c8336b5ed9cb8b6b98f7b10da2f6"}, +] + +[package.extras] +all = ["black (==22.3)", "click (==8.0.4)", "flake8 (>=3.8.3)", "flax (>=0.6.3)", "h5py (>=3.7.0)", "huggingface-hub (>=0.12.1)", "isort (>=5.5.4)", "jax (>=0.3.25)", "jaxlib (>=0.3.25)", "numpy (>=1.21.6)", "paddlepaddle (>=2.4.1)", "pytest (>=7.2.0)", "pytest-benchmark (>=4.0.0)", "setuptools-rust (>=1.5.2)", "tensorflow (==2.11.0)", "torch (>=1.10)"] +dev = ["black (==22.3)", "click (==8.0.4)", "flake8 (>=3.8.3)", "flax (>=0.6.3)", "h5py (>=3.7.0)", "huggingface-hub (>=0.12.1)", "isort (>=5.5.4)", "jax (>=0.3.25)", "jaxlib (>=0.3.25)", "numpy (>=1.21.6)", "paddlepaddle (>=2.4.1)", "pytest (>=7.2.0)", "pytest-benchmark (>=4.0.0)", "setuptools-rust (>=1.5.2)", "tensorflow (==2.11.0)", "torch (>=1.10)"] jax = ["flax (>=0.6.3)", "jax (>=0.3.25)", "jaxlib (>=0.3.25)"] numpy = ["numpy (>=1.21.6)"] paddlepaddle = ["paddlepaddle (>=2.4.1)"] +pinned-tf = ["tensorflow (==2.11.0)"] quality = ["black (==22.3)", "click (==8.0.4)", "flake8 (>=3.8.3)", "isort (>=5.5.4)"] tensorflow = ["tensorflow (>=2.11.0)"] testing = ["h5py (>=3.7.0)", "huggingface-hub (>=0.12.1)", "numpy (>=1.21.6)", "pytest (>=7.2.0)", "pytest-benchmark (>=4.0.0)", "setuptools-rust (>=1.5.2)"] @@ -6431,13 +6484,13 @@ doc = ["reno", "sphinx", "tornado (>=4.5)"] [[package]] name = "textual" -version = "0.31.0" +version = "0.32.0" description = "Modern Text User Interface framework" optional = true python-versions = ">=3.7,<4.0" files = [ - {file = "textual-0.31.0-py3-none-any.whl", hash = "sha256:1243bccadb28e1ff46bdfe676ee25a6ce52756842bc9dca4d824e0bc4d7d9a42"}, - {file = "textual-0.31.0.tar.gz", hash = "sha256:e2b43f1c26b21731ee83f558f8d6cb4f7163e3a713854c36cd7785139a0e4e51"}, + {file = "textual-0.32.0-py3-none-any.whl", hash = "sha256:81fc68406c8806bc864e2f035874a868b4ff0cf466289dce5f7b31869949383b"}, + {file = "textual-0.32.0.tar.gz", hash = "sha256:f7b6683bc18faee6fd3c47cfbad43fbf8273c5fecc12230d52ce5ee089021327"}, ] [package.dependencies] @@ -6688,20 +6741,20 @@ files = [ [[package]] name = "tqdm" -version = "4.65.0" +version = "4.65.1" description = "Fast, Extensible Progress Meter" optional = false python-versions = ">=3.7" files = [ - {file = "tqdm-4.65.0-py3-none-any.whl", hash = "sha256:c4f53a17fe37e132815abceec022631be8ffe1b9381c2e6e30aa70edc99e9671"}, - {file = "tqdm-4.65.0.tar.gz", hash = "sha256:1871fb68a86b8fb3b59ca4cdd3dcccbc7e6d613eeed31f4c332531977b89beb5"}, + {file = "tqdm-4.65.1-py3-none-any.whl", hash = "sha256:16181c62ad2c6f8f6f29876e66322faad1c7fd3cc70aa9cc25ff63e50d1da031"}, + {file = "tqdm-4.65.1.tar.gz", hash = "sha256:2cb0075cc5269f8edac40bdeb757cc36ab5b6648caf014822b67e1a49fba141d"}, ] [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} [package.extras] -dev = 
["py-make (>=0.1.0)", "twine", "wheel"] +dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] notebook = ["ipywidgets (>=6)"] slack = ["slack-sdk"] telegram = ["requests"] @@ -7296,13 +7349,13 @@ files = [ [[package]] name = "wheel" -version = "0.41.0" +version = "0.41.1" description = "A built-package format for Python" optional = false python-versions = ">=3.7" files = [ - {file = "wheel-0.41.0-py3-none-any.whl", hash = "sha256:7e9be3bbd0078f6147d82ed9ed957e323e7708f57e134743d2edef3a7b7972a9"}, - {file = "wheel-0.41.0.tar.gz", hash = "sha256:55a0f0a5a84869bce5ba775abfd9c462e3a6b1b7b7ec69d72c0b83d673a5114d"}, + {file = "wheel-0.41.1-py3-none-any.whl", hash = "sha256:473219bd4cbedc62cea0cb309089b593e47c15c4a2531015f94e4e3b9a0f6981"}, + {file = "wheel-0.41.1.tar.gz", hash = "sha256:12b911f083e876e10c595779709f8a88a59f45aacc646492a67fe9ef796c1b47"}, ] [package.extras] @@ -7615,4 +7668,4 @@ local = ["ctransformers", "llama-cpp-python", "sentence-transformers"] [metadata] lock-version = "2.0" python-versions = ">=3.9,<3.11" -content-hash = "154238df4e8b249196011f592d63e2fad0e18316960e400037f54453c813d47a" +content-hash = "7c6d7dc33a9b0ae9da053fb78b9f2eabbe78df38c4763e5a8719df6249d6f657" diff --git a/pyproject.toml b/pyproject.toml index 849130a74b..5b7054b65c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "langflow" -version = "0.4.0" +version = "0.4.3" description = "A Python package with a built-in web application" authors = ["Logspace "] maintainers = [ @@ -19,7 +19,7 @@ readme = "README.md" keywords = ["nlp", "langchain", "openai", "gpt", "gui"] packages = [{ include = "langflow", from = "src/backend" }] include = ["src/backend/langflow/*", "src/backend/langflow/**/*"] - +documentation = "https://docs.langflow.org" [tool.poetry.scripts] langflow = "langflow.__main__:main" @@ -33,7 +33,7 @@ google-search-results = "^2.4.1" google-api-python-client = "^2.79.0" typer = "^0.9.0" gunicorn = "^21.1.0" -langchain = "^0.0.250" +langchain = "^0.0.256" openai = "^0.27.8" pandas = "^2.0.0" chromadb = "^0.3.21" @@ -63,7 +63,7 @@ python-multipart = "^0.0.6" sqlmodel = "^0.0.8" faiss-cpu = "^1.7.4" anthropic = "^0.3.0" -orjson = "^3.9.1" +orjson = "3.9.3" multiprocess = "^0.70.14" cachetools = "^5.3.1" types-cachetools = "^5.3.0.5" @@ -76,6 +76,8 @@ google-cloud-aiplatform = "^1.26.1" psycopg = "^3.1.9" psycopg-binary = "^3.1.9" fastavro = "^1.8.0" +langchain-experimental = "^0.0.8" +alembic = "^1.11.2" [tool.poetry.group.dev.dependencies] black = "^23.1.0" diff --git a/src/backend/langflow/__init__.py b/src/backend/langflow/__init__.py index 5920369e26..f6eb836cc6 100644 --- a/src/backend/langflow/__init__.py +++ b/src/backend/langflow/__init__.py @@ -1,5 +1,7 @@ from importlib import metadata -from langflow.cache import cache_manager + +# Deactivate cache manager for now +# from langflow.services.cache import cache_manager from langflow.processing.process import load_flow_from_json from langflow.interface.custom.custom_component import CustomComponent diff --git a/src/backend/langflow/__main__.py b/src/backend/langflow/__main__.py index 58789908a9..43247b10f8 100644 --- a/src/backend/langflow/__main__.py +++ b/src/backend/langflow/__main__.py @@ -1,7 +1,7 @@ -import os import sys import time import httpx +from langflow.services.utils import get_settings_manager from langflow.utils.util import get_number_of_workers from multiprocess import Process # type: ignore import platform @@ -13,7 +13,6 @@ from rich import print as rprint import typer from 
langflow.main import setup_app
-from langflow.settings import settings
 from langflow.utils.logger import configure, logger
 import webbrowser
 from dotenv import load_dotenv
@@ -25,49 +24,25 @@ def update_settings(
     config: str,
     cache: str,
     dev: bool = False,
-    database_url: Optional[str] = None,
     remove_api_keys: bool = False,
     components_path: Optional[Path] = None,
 ):
     """Update the settings from a config file."""

     # Check for database_url in the environment variables
-    database_url = database_url or os.getenv("langflow_database_url")
-
+    settings_manager = get_settings_manager()
     if config:
         logger.debug(f"Loading settings from {config}")
-        settings.update_from_yaml(config, dev=dev)
-    if database_url:
-        settings.update_settings(database_url=database_url)
+        settings_manager.settings.update_from_yaml(config, dev=dev)
     if remove_api_keys:
         logger.debug(f"Setting remove_api_keys to {remove_api_keys}")
-        settings.update_settings(remove_api_keys=remove_api_keys)
+        settings_manager.settings.update_settings(REMOVE_API_KEYS=remove_api_keys)
     if cache:
         logger.debug(f"Setting cache to {cache}")
-        settings.update_settings(cache=cache)
+        settings_manager.settings.update_settings(CACHE=cache)
     if components_path:
         logger.debug(f"Adding component path {components_path}")
-        settings.update_settings(components_path=components_path)
-
-
-def load_params():
-    """
-    Load the parameters from the environment variables.
-    """
-    global_vars = globals()
-
-    for key, value in global_vars.items():
-        env_key = f"LANGFLOW_{key.upper()}"
-        if env_key in os.environ:
-            if isinstance(value, bool):
-                # Handle booleans
-                global_vars[key] = os.getenv(env_key, str(value)).lower() == "true"
-            elif isinstance(value, int):
-                # Handle integers
-                global_vars[key] = int(os.getenv(env_key, str(value)))
-            elif isinstance(value, str) or value is None:
-                # Handle strings and None values
-                global_vars[key] = os.getenv(env_key, str(value))
+        settings_manager.settings.update_settings(COMPONENTS_PATH=components_path)


 def serve_on_jcloud():
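[Editor's note — illustration, not part of the patch] The hunk above swaps the old module-level `settings` object and the hand-rolled `load_params()` environment loop for a shared settings manager. The sketch below shows the general shape of that pattern with Pydantic v1's `BaseSettings`, which resolves `LANGFLOW_*` environment variables on its own; all class and field names here are hypothetical stand-ins, not Langflow's actual code.

    from typing import Optional

    from pydantic import BaseSettings  # pydantic v1; in v2 this moved to pydantic-settings


    class Settings(BaseSettings):
        """Hypothetical settings object with a few illustrative fields."""

        cache: str = "SQLiteCache"
        remove_api_keys: bool = False
        database_url: Optional[str] = None

        class Config:
            # Pydantic reads LANGFLOW_CACHE, LANGFLOW_DATABASE_URL, etc.
            # from the environment, which is what makes a manual
            # os.environ-parsing loop redundant.
            env_prefix = "LANGFLOW_"

        def update_settings(self, **kwargs) -> None:
            # Accept upper-case keyword overrides such as CACHE=... to
            # mirror the calls in the hunk above.
            for key, value in kwargs.items():
                setattr(self, key.lower(), value)


    class SettingsManager:
        """Holds the single Settings instance shared by every module."""

        def __init__(self) -> None:
            self.settings = Settings()


    _settings_manager: Optional[SettingsManager] = None


    def get_settings_manager() -> SettingsManager:
        # Lazily create one manager so all callers mutate the same settings.
        global _settings_manager
        if _settings_manager is None:
            _settings_manager = SettingsManager()
        return _settings_manager

With this shape, `update_settings(CACHE=cache)` is just a keyword override on the shared instance, and environment variables keep working without the deleted `load_params()` loop.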
@@ -131,10 +106,12 @@ def serve(
         help="Path to the directory containing custom components.",
         envvar="LANGFLOW_COMPONENTS_PATH",
     ),
-    config: str = typer.Option("config.yaml", help="Path to the configuration file."),
+    config: str = typer.Option(
+        Path(__file__).parent / "config.yaml", help="Path to the configuration file."
+    ),
     # .env file param
     env_file: Path = typer.Option(
-        ".env", help="Path to the .env file containing environment variables."
+        None, help="Path to the .env file containing environment variables."
     ),
     log_level: str = typer.Option(
         "critical", help="Logging level.", envvar="LANGFLOW_LOG_LEVEL"
     ),
@@ -149,11 +126,13 @@
     ),
     jcloud: bool = typer.Option(False, help="Deploy on Jina AI Cloud"),
     dev: bool = typer.Option(False, help="Run in development mode (may contain bugs)"),
-    database_url: str = typer.Option(
-        None,
-        help="Database URL to connect to. If not provided, a local SQLite database will be used.",
-        envvar="LANGFLOW_DATABASE_URL",
-    ),
+    # This option is not handled as a CLI flag; the value is picked up
+    # from the .env file / environment by the Pydantic settings instead
+    # database_url: str = typer.Option(
+    #     None,
+    #     help="Database URL to connect to. If not provided, a local SQLite database will be used.",
+    #     envvar="LANGFLOW_DATABASE_URL",
+    # ),
     path: str = typer.Option(
         None,
         help="Path to the frontend directory containing build files. This is for development purposes only.",
@@ -169,6 +148,11 @@ def serve(
         help="Remove API keys from the projects saved in the database.",
         envvar="LANGFLOW_REMOVE_API_KEYS",
     ),
+    backend_only: bool = typer.Option(
+        False,
+        help="Run only the backend server without the frontend.",
+        envvar="LANGFLOW_BACKEND_ONLY",
+    ),
 ):
     """
     Run the Langflow server.
     """
@@ -176,7 +160,6 @@
     # override env variables with .env file
     if env_file:
         load_dotenv(env_file, override=True)
-        load_params()

     if jcloud:
         return serve_on_jcloud()
@@ -185,14 +168,13 @@
     update_settings(
         config,
         dev=dev,
-        database_url=database_url,
         remove_api_keys=remove_api_keys,
         cache=cache,
         components_path=components_path,
     )
     # create path object if path is provided
     static_files_dir: Optional[Path] = Path(path) if path else None
-    app = setup_app(static_files_dir=static_files_dir)
+    app = setup_app(static_files_dir=static_files_dir, backend_only=backend_only)
     # check if port is being used
     if is_port_in_use(port, host):
         port = get_free_port(port)
@@ -204,6 +186,10 @@
         "timeout": timeout,
     }

+    # If the server is being started from inside a pytest run, return early
+    if "pytest" in sys.modules:
+        return
+
     if platform.system() in ["Windows"]:
         # Run using uvicorn on MacOS and Windows
         # Windows doesn't support gunicorn
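[Editor's note — illustration, not part of the patch] The new `sys.modules` guard works because pytest is already imported into the interpreter for the whole test session, so no environment variable is needed to detect a test run (the comment in the hunk above is reworded accordingly). A minimal self-contained illustration:

    import sys


    def is_running_under_pytest() -> bool:
        # pytest appears in sys.modules for the duration of a test session,
        # so library code can cheaply skip side effects such as starting
        # a web server.
        return "pytest" in sys.modules


    if __name__ == "__main__":
        print(is_running_under_pytest())  # False when executed directly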
+# Valid values for version_path_separator are: +# +# version_path_separator = : +# version_path_separator = ; +# version_path_separator = space version_path_separator = os # Use os.pathsep. Default configuration used for new projects. + +# set to 'true' to search source files recursively +# in each "version_locations" directory +# new in Alembic version 1.10 +# recursive_version_locations = false + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +# This is the path to the db in the root of the project. +# When the user runs Langflow, the database url will +# be set dynamically. sqlalchemy.url = sqlite:///../../../langflow.db + + [post_write_hooks] # post_write_hooks defines scripts or Python functions that are run # on newly generated revision scripts. See the documentation for further # detail and examples + # format using "black" - use the console_scripts runner, against the "black" entrypoint # hooks = black # black.type = console_scripts # black.entrypoint = black # black.options = -l 79 REVISION_SCRIPT_FILENAME + # Logging configuration [loggers] keys = root,sqlalchemy,alembic + [handlers] keys = console + [formatters] keys = generic + [logger_root] level = WARN handlers = console qualname = + [logger_sqlalchemy] level = WARN handlers = qualname = sqlalchemy.engine + [logger_alembic] level = INFO handlers = qualname = alembic + [handler_console] class = StreamHandler args = (sys.stderr,) level = NOTSET formatter = generic + [formatter_generic] format = %(levelname)-5.5s [%(name)s] %(message)s datefmt = %H:%M:%S diff --git a/src/backend/langflow/alembic/README b/src/backend/langflow/alembic/README new file mode 100644 index 0000000000..98e4f9c44e --- /dev/null +++ b/src/backend/langflow/alembic/README @@ -0,0 +1 @@ +Generic single-database configuration. \ No newline at end of file diff --git a/src/backend/langflow/alembic/env.py b/src/backend/langflow/alembic/env.py new file mode 100644 index 0000000000..3108944312 --- /dev/null +++ b/src/backend/langflow/alembic/env.py @@ -0,0 +1,78 @@ +from logging.config import fileConfig + +from sqlalchemy import engine_from_config +from sqlalchemy import pool + +from alembic import context + +from langflow.services.database.manager import SQLModel + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. if config.config_file_name is not None: + fileConfig(config.config_file_name) + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata target_metadata = SQLModel.metadata + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + + +def run_migrations_offline() -> None: + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output.
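(Editorial note, inferred from the alembic.ini comment above: the sqlite URL baked into that file is only a fallback; at runtime the application is expected to point Alembic at the real database before migrations run, e.g. via the standard Alembic call config.set_main_option("sqlalchemy.url", database_url), where database_url is a hypothetical name for the configured URL.)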
+ + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online() -> None: + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. + + """ + connectable = engine_from_config( + config.get_section(config.config_ini_section, {}), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + context.configure(connection=connection, target_metadata=target_metadata) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/src/backend/langflow/alembic/script.py.mako b/src/backend/langflow/alembic/script.py.mako new file mode 100644 index 0000000000..6ce3351093 --- /dev/null +++ b/src/backend/langflow/alembic/script.py.mako @@ -0,0 +1,27 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +import sqlmodel +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision: str = ${repr(up_revision)} +down_revision: Union[str, None] = ${repr(down_revision)} +branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} +depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} + + +def upgrade() -> None: + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + ${downgrades if downgrades else "pass"} diff --git a/src/backend/langflow/alembic/versions/0a534bdfd84b_remove_flowstyles_table.py b/src/backend/langflow/alembic/versions/0a534bdfd84b_remove_flowstyles_table.py new file mode 100644 index 0000000000..0100df44dc --- /dev/null +++ b/src/backend/langflow/alembic/versions/0a534bdfd84b_remove_flowstyles_table.py @@ -0,0 +1,42 @@ +"""Remove FlowStyles table + +Revision ID: 0a534bdfd84b +Revises: 4814b6f4abfd +Create Date: 2023-08-07 14:09:06.844104 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = "0a534bdfd84b" +down_revision: Union[str, None] = "4814b6f4abfd" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table("flowstyle") + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table( + "flowstyle", + sa.Column("color", sa.VARCHAR(), nullable=False), + sa.Column("emoji", sa.VARCHAR(), nullable=False), + sa.Column("flow_id", sa.CHAR(length=32), nullable=True), + sa.Column("id", sa.CHAR(length=32), nullable=False), + sa.ForeignKeyConstraint( + ["flow_id"], + ["flow.id"], + ), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("id"), + ) + # ### end Alembic commands ### diff --git a/src/backend/langflow/alembic/versions/4814b6f4abfd_add_flow_table.py b/src/backend/langflow/alembic/versions/4814b6f4abfd_add_flow_table.py new file mode 100644 index 0000000000..0b2f32657a --- /dev/null +++ b/src/backend/langflow/alembic/versions/4814b6f4abfd_add_flow_table.py @@ -0,0 +1,65 @@ +"""Add Flow table + +Revision ID: 4814b6f4abfd +Revises: +Create Date: 2023-08-05 17:47:42.879824 + +""" + +import contextlib +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +import sqlmodel + + +# revision identifiers, used by Alembic. +revision: str = "4814b6f4abfd" +down_revision: Union[str, None] = None +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + + # Suppress the error so the migration does not fail if the table already exists. + with contextlib.suppress(sa.exc.OperationalError): + op.create_table( + "flow", + sa.Column("data", sa.JSON(), nullable=True), + sa.Column("name", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column("description", sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("id"), + ) + op.create_index( + op.f("ix_flow_description"), "flow", ["description"], unique=False + ) + op.create_index(op.f("ix_flow_name"), "flow", ["name"], unique=False) + with contextlib.suppress(sa.exc.OperationalError): + op.create_table( + "flowstyle", + sa.Column("color", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column("emoji", sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column("flow_id", sqlmodel.sql.sqltypes.GUID(), nullable=True), + sa.Column("id", sqlmodel.sql.sqltypes.GUID(), nullable=False), + sa.ForeignKeyConstraint( + ["flow_id"], + ["flow.id"], + ), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("id"), + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust!
### + op.drop_table("flowstyle") + op.drop_index(op.f("ix_flow_name"), table_name="flow") + op.drop_index(op.f("ix_flow_description"), table_name="flow") + op.drop_table("flow") + # ### end Alembic commands ### diff --git a/src/backend/langflow/api/router.py b/src/backend/langflow/api/router.py index b9c51c11e2..ea1938a750 100644 --- a/src/backend/langflow/api/router.py +++ b/src/backend/langflow/api/router.py @@ -5,7 +5,6 @@ endpoints_router, validate_router, flows_router, - flow_styles_router, component_router, ) @@ -17,4 +16,3 @@ router.include_router(validate_router) router.include_router(component_router) router.include_router(flows_router) -router.include_router(flow_styles_router) diff --git a/src/backend/langflow/api/utils.py b/src/backend/langflow/api/utils.py index 91fa93ea40..0fb53e5414 100644 --- a/src/backend/langflow/api/utils.py +++ b/src/backend/langflow/api/utils.py @@ -66,3 +66,30 @@ def merge_nested_dicts(dict1, dict2): else: dict1[key] = value return dict1 + + +def merge_nested_dicts_with_renaming(dict1, dict2): + for key, value in dict2.items(): + if ( + key in dict1 + and isinstance(value, dict) + and isinstance(dict1.get(key), dict) + ): + for sub_key, sub_value in value.items(): + if sub_key in dict1[key]: + new_key = get_new_key(dict1[key], sub_key) + dict1[key][new_key] = sub_value + else: + dict1[key][sub_key] = sub_value + else: + dict1[key] = value + return dict1 + + +def get_new_key(dictionary, original_key): + counter = 1 + new_key = original_key + " (" + str(counter) + ")" + while new_key in dictionary: + counter += 1 + new_key = original_key + " (" + str(counter) + ")" + return new_key diff --git a/src/backend/langflow/api/v1/__init__.py b/src/backend/langflow/api/v1/__init__.py index f001152a96..b6e7b36d83 100644 --- a/src/backend/langflow/api/v1/__init__.py +++ b/src/backend/langflow/api/v1/__init__.py @@ -2,7 +2,6 @@ from langflow.api.v1.validate import router as validate_router from langflow.api.v1.chat import router as chat_router from langflow.api.v1.flows import router as flows_router -from langflow.api.v1.flow_styles import router as flow_styles_router from langflow.api.v1.components import router as component_router __all__ = [ @@ -11,5 +10,4 @@ "component_router", "validate_router", "flows_router", - "flow_styles_router", ] diff --git a/src/backend/langflow/api/v1/chat.py b/src/backend/langflow/api/v1/chat.py index 4248bcebd5..611407e8d8 100644 --- a/src/backend/langflow/api/v1/chat.py +++ b/src/backend/langflow/api/v1/chat.py @@ -3,13 +3,13 @@ from langflow.api.utils import build_input_keys_response from langflow.api.v1.schemas import BuildStatus, BuiltResponse, InitResponse, StreamData -from langflow.chat.manager import ChatManager +from langflow.services import service_manager, ServiceType from langflow.graph.graph.base import Graph from langflow.utils.logger import logger from cachetools import LRUCache router = APIRouter(tags=["Chat"]) -chat_manager = ChatManager() + flow_data_store: LRUCache = LRUCache(maxsize=10) @@ -17,6 +17,7 @@ async def chat(client_id: str, websocket: WebSocket): """Websocket endpoint for chat.""" try: + chat_manager = service_manager.get(ServiceType.CHAT_MANAGER) if client_id in chat_manager.in_memory_cache: await chat_manager.handle_websocket(client_id, websocket) else: @@ -45,6 +46,7 @@ async def init_build(graph_data: dict, flow_id: str): return InitResponse(flowId=flow_id) # Delete from cache if already exists + chat_manager = service_manager.get(ServiceType.CHAT_MANAGER) if flow_id in chat_manager.in_memory_cache: 
with chat_manager.in_memory_cache._lock: chat_manager.in_memory_cache.delete(flow_id) @@ -125,9 +127,8 @@ async def event_stream(flow_id): vertex.build() params = vertex._built_object_repr() valid = True - logger.debug( - f"Building node {str(params)[:50]}{'...' if len(str(params)) > 50 else ''}" - ) + logger.debug(f"Building node {str(vertex.vertex_type)}") + logger.debug(f"Output: {params}") if vertex.artifacts: # The artifacts will be prompt variables # passed to build_input_keys_response @@ -156,12 +157,12 @@ async def event_stream(flow_id): ) else: input_keys_response = { - "input_keys": {}, + "input_keys": None, "memory_keys": [], "handle_keys": [], } yield str(StreamData(event="message", data=input_keys_response)) - + chat_manager = service_manager.get(ServiceType.CHAT_MANAGER) chat_manager.set_cache(flow_id, langchain_object) # We need to reset the chat history chat_manager.chat_history.empty_history(flow_id) diff --git a/src/backend/langflow/api/v1/components.py b/src/backend/langflow/api/v1/components.py index 1e34da2aaa..4071461fb0 100644 --- a/src/backend/langflow/api/v1/components.py +++ b/src/backend/langflow/api/v1/components.py @@ -1,8 +1,8 @@ from datetime import timezone from typing import List from uuid import UUID -from langflow.database.models.component import Component, ComponentModel -from langflow.database.base import get_session +from langflow.services.database.models.component import Component, ComponentModel +from langflow.services.utils import get_session from sqlmodel import Session, select from fastapi import APIRouter, Depends, HTTPException from sqlalchemy.exc import IntegrityError diff --git a/src/backend/langflow/api/v1/endpoints.py b/src/backend/langflow/api/v1/endpoints.py index f11a233408..5d0c9a9004 100644 --- a/src/backend/langflow/api/v1/endpoints.py +++ b/src/backend/langflow/api/v1/endpoints.py @@ -1,19 +1,15 @@ from http import HTTPStatus from typing import Annotated, Optional -from langflow.cache.utils import save_uploaded_file -from langflow.database.models.flow import Flow +from langflow.services.cache.utils import save_uploaded_file +from langflow.services.database.models.flow import Flow from langflow.processing.process import process_graph_cached, process_tweaks +from langflow.services.utils import get_settings_manager from langflow.utils.logger import logger -from langflow.settings import settings - from fastapi import APIRouter, Depends, HTTPException, UploadFile, Body from langflow.interface.custom.custom_component import CustomComponent -from langflow.interface.custom.directory_reader import ( - CustomComponentPathValueError, -) from langflow.api.v1.schemas import ( ProcessResponse, @@ -21,7 +17,7 @@ CustomComponentCode, ) -from langflow.api.utils import merge_nested_dicts +from langflow.api.utils import merge_nested_dicts_with_renaming from langflow.interface.types import ( build_langchain_types_dict, @@ -29,7 +25,7 @@ build_langchain_custom_component_list_from_path, ) -from langflow.database.base import get_session +from langflow.services.utils import get_session from sqlmodel import Session # build router @@ -38,48 +34,38 @@ @router.get("/all") def get_all(): + logger.debug("Building langchain types dict") native_components = build_langchain_types_dict() - # custom_components is a list of dicts # need to merge all the keys into one dict custom_components_from_file = {} - if settings.components_path: + settings_manager = get_settings_manager() + if settings_manager.settings.COMPONENTS_PATH: + logger.info( + f"Building custom components 
from {settings_manager.settings.COMPONENTS_PATH}" + ) custom_component_dicts = [ build_langchain_custom_component_list_from_path(str(path)) - for path in settings.components_path + for path in settings_manager.settings.COMPONENTS_PATH ] + logger.info(f"Loading {len(custom_component_dicts)} category(ies)") for custom_component_dict in custom_component_dicts: - custom_components_from_file = merge_nested_dicts( + # custom_component_dict is a dict of dicts + if not custom_component_dict: + continue + category = list(custom_component_dict.keys())[0] + logger.info( + f"Loading {len(custom_component_dict[category])} component(s) from category {category}" + ) + logger.debug(custom_component_dict) + custom_components_from_file = merge_nested_dicts_with_renaming( custom_components_from_file, custom_component_dict ) - return merge_nested_dicts(native_components, custom_components_from_file) - -@router.get("/load_custom_component_from_path") -def get_load_custom_component_from_path(path: str): - try: - data = build_langchain_custom_component_list_from_path(path) - except CustomComponentPathValueError as err: - raise HTTPException( - status_code=400, - detail={"error": type(err).__name__, "traceback": str(err)}, - ) from err - - return data - - -@router.get("/load_custom_component_from_path_TEST") -def get_load_custom_component_from_path_test(path: str): - from langflow.interface.custom.directory_reader import ( - DirectoryReader, + return merge_nested_dicts_with_renaming( + native_components, custom_components_from_file ) - reader = DirectoryReader(path, False) - file_list = reader.get_files() - data = reader.build_component_menu_list(file_list) - - return reader.filter_loaded_components(data, True) - # For backwards compatibility we will keep the old endpoint @router.post("/predict/{flow_id}", response_model=ProcessResponse) diff --git a/src/backend/langflow/api/v1/flow_styles.py b/src/backend/langflow/api/v1/flow_styles.py deleted file mode 100644 index 40e292eb30..0000000000 --- a/src/backend/langflow/api/v1/flow_styles.py +++ /dev/null @@ -1,83 +0,0 @@ -from uuid import UUID -from langflow.database.models.flow_style import ( - FlowStyle, - FlowStyleCreate, - FlowStyleRead, - FlowStyleUpdate, -) -from langflow.database.base import get_session -from sqlmodel import Session, select -from fastapi import APIRouter, Depends, HTTPException - - -# build router -router = APIRouter(prefix="/flow_styles", tags=["FlowStyles"]) - -# FlowStyleCreate: -# class FlowStyleBase(SQLModel): -# color: str = Field(index=True) -# emoji: str = Field(index=False) -# flow_id: UUID = Field(default=None, foreign_key="flow.id") - - -@router.post("/", response_model=FlowStyleRead) -def create_flow_style( - *, session: Session = Depends(get_session), flow_style: FlowStyleCreate -): - """Create a new flow_style.""" - db_flow_style = FlowStyle.from_orm(flow_style) - session.add(db_flow_style) - session.commit() - session.refresh(db_flow_style) - return db_flow_style - - -@router.get("/", response_model=list[FlowStyleRead]) -def read_flow_styles(*, session: Session = Depends(get_session)): - """Read all flows.""" - try: - flows = session.exec(select(FlowStyle)).all() - except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) from e - return flows - - -@router.get("/{flow_styles_id}", response_model=FlowStyleRead) -def read_flow_style(*, session: Session = Depends(get_session), flow_styles_id: UUID): - """Read a flow_style.""" - if flow_style := session.get(FlowStyle, flow_styles_id): - return flow_style - else: 
- raise HTTPException(status_code=404, detail="FlowStyle not found") - - -@router.patch("/{flow_style_id}", response_model=FlowStyleRead) -def update_flow_style( - *, - session: Session = Depends(get_session), - flow_style_id: UUID, - flow_style: FlowStyleUpdate, -): - """Update a flow_style.""" - db_flow_style = session.get(FlowStyle, flow_style_id) - if not db_flow_style: - raise HTTPException(status_code=404, detail="FlowStyle not found") - flow_data = flow_style.dict(exclude_unset=True) - for key, value in flow_data.items(): - if hasattr(db_flow_style, key) and value is not None: - setattr(db_flow_style, key, value) - session.add(db_flow_style) - session.commit() - session.refresh(db_flow_style) - return db_flow_style - - -@router.delete("/{flow_id}") -def delete_flow_style(*, session: Session = Depends(get_session), flow_id: UUID): - """Delete a flow_style.""" - flow_style = session.get(FlowStyle, flow_id) - if not flow_style: - raise HTTPException(status_code=404, detail="FlowStyle not found") - session.delete(flow_style) - session.commit() - return {"message": "FlowStyle deleted successfully"} diff --git a/src/backend/langflow/api/v1/flows.py b/src/backend/langflow/api/v1/flows.py index 4e000a128b..3145ced3ce 100644 --- a/src/backend/langflow/api/v1/flows.py +++ b/src/backend/langflow/api/v1/flows.py @@ -1,16 +1,15 @@ from typing import List from uuid import UUID -from langflow.settings import settings from langflow.api.utils import remove_api_keys from langflow.api.v1.schemas import FlowListCreate, FlowListRead -from langflow.database.models.flow import ( +from langflow.services.database.models.flow import ( Flow, FlowCreate, FlowRead, - FlowReadWithStyle, FlowUpdate, ) -from langflow.database.base import get_session +from langflow.services.utils import get_session +from langflow.services.utils import get_settings_manager from sqlmodel import Session, select from fastapi import APIRouter, Depends, HTTPException from fastapi.encoders import jsonable_encoder @@ -32,7 +31,7 @@ def create_flow(*, session: Session = Depends(get_session), flow: FlowCreate): return db_flow -@router.get("/", response_model=list[FlowReadWithStyle], status_code=200) +@router.get("/", response_model=list[FlowRead], status_code=200) def read_flows(*, session: Session = Depends(get_session)): """Read all flows.""" try: @@ -42,7 +41,7 @@ def read_flows(*, session: Session = Depends(get_session)): return [jsonable_encoder(flow) for flow in flows] -@router.get("/{flow_id}", response_model=FlowReadWithStyle, status_code=200) +@router.get("/{flow_id}", response_model=FlowRead, status_code=200) def read_flow(*, session: Session = Depends(get_session), flow_id: UUID): """Read a flow.""" if flow := session.get(Flow, flow_id): @@ -61,7 +60,8 @@ def update_flow( if not db_flow: raise HTTPException(status_code=404, detail="Flow not found") flow_data = flow.dict(exclude_unset=True) - if settings.remove_api_keys: + settings_manager = get_settings_manager() + if settings_manager.settings.REMOVE_API_KEYS: flow_data = remove_api_keys(flow_data) for key, value in flow_data.items(): setattr(db_flow, key, value) diff --git a/src/backend/langflow/api/v1/schemas.py b/src/backend/langflow/api/v1/schemas.py index 0148dac6db..776e90034a 100644 --- a/src/backend/langflow/api/v1/schemas.py +++ b/src/backend/langflow/api/v1/schemas.py @@ -1,7 +1,7 @@ from enum import Enum from pathlib import Path from typing import Any, Dict, List, Optional, Union -from langflow.database.models.flow import FlowCreate, FlowRead +from 
langflow.services.database.models.flow import FlowCreate, FlowRead from pydantic import BaseModel, Field, validator import json diff --git a/src/backend/langflow/auth/auth.py b/src/backend/langflow/auth/auth.py index e33ac64ddd..3a7e324c28 100644 --- a/src/backend/langflow/auth/auth.py +++ b/src/backend/langflow/auth/auth.py @@ -1,13 +1,14 @@ from typing import Annotated -from fastapi import Depends, HTTPException, status -from passlib.context import CryptContext from jose import JWTError, jwt -from datetime import datetime, timedelta, timezone +from sqlalchemy.orm import Session +from passlib.context import CryptContext from fastapi.security import OAuth2PasswordBearer +from fastapi import Depends, HTTPException, status +from datetime import datetime, timedelta, timezone + from langflow.database.models.token import TokenData from langflow.database.models.user import get_user, User -from sqlalchemy.orm import Session -from langflow.database.base import get_session +from langflow.services.utils import get_session # TODO: Move to env - Test propose!!!!! diff --git a/src/backend/langflow/cache/__init__.py b/src/backend/langflow/cache/__init__.py deleted file mode 100644 index 723aa9e18e..0000000000 --- a/src/backend/langflow/cache/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -from langflow.cache.manager import cache_manager -from langflow.cache.flow import InMemoryCache - -__all__ = [ - "cache_manager", - "InMemoryCache", -] diff --git a/src/backend/langflow/components/__init__.py b/src/backend/langflow/components/__init__.py new file mode 100644 index 0000000000..7650422103 --- /dev/null +++ b/src/backend/langflow/components/__init__.py @@ -0,0 +1,4 @@ +from langflow.interface.custom.custom_component import CustomComponent + + +__all__ = ["CustomComponent"] diff --git a/src/backend/langflow/chat/__init__.py b/src/backend/langflow/components/chains/__init__.py similarity index 100% rename from src/backend/langflow/chat/__init__.py rename to src/backend/langflow/components/chains/__init__.py diff --git a/src/backend/langflow/components/chains/prompt_runner.py b/src/backend/langflow/components/chains/prompt_runner.py new file mode 100644 index 0000000000..141941c389 --- /dev/null +++ b/src/backend/langflow/components/chains/prompt_runner.py @@ -0,0 +1,33 @@ +from langflow import CustomComponent + +from langchain.llms.base import BaseLLM +from langchain import PromptTemplate +from langchain.schema import Document + + +class PromptRunner(CustomComponent): + display_name: str = "Prompt Runner" + description: str = "Run a Chain with the given PromptTemplate" + beta = True + field_config = { + "llm": {"display_name": "LLM"}, + "prompt": { + "display_name": "Prompt Template", + "info": "Make sure the prompt has all variables filled.", + }, + "code": {"show": False}, + "inputs": {"field_type": "code"}, + } + + def build( + self, + llm: BaseLLM, + prompt: PromptTemplate, + ) -> Document: + chain = prompt | llm + # The input is an empty dict because the prompt is already filled + result = chain.invoke({}) + if hasattr(result, "content"): + result = result.content + self.repr_value = result + return Document(page_content=str(result)) diff --git a/src/backend/langflow/database/base.py b/src/backend/langflow/database/base.py deleted file mode 100644 index 338298a6be..0000000000 --- a/src/backend/langflow/database/base.py +++ /dev/null @@ -1,51 +0,0 @@ -from contextlib import contextmanager -from langflow.settings import settings -from sqlmodel import SQLModel, Session, create_engine -from 
langflow.utils.logger import logger - -if settings.database_url and settings.database_url.startswith("sqlite"): - connect_args = {"check_same_thread": False} -else: - connect_args = {} -if not settings.database_url: - raise RuntimeError("No database_url provided") -engine = create_engine(settings.database_url, connect_args=connect_args) - - -def create_db_and_tables(): - logger.debug("Creating database and tables") - try: - SQLModel.metadata.create_all(engine) - except Exception as exc: - logger.error(f"Error creating database and tables: {exc}") - raise RuntimeError("Error creating database and tables") from exc - # Now check if the table Flow exists, if not, something went wrong - # and we need to create the tables again. - from sqlalchemy import inspect - - inspector = inspect(engine) - if "flow" not in inspector.get_table_names(): - logger.error("Something went wrong creating the database and tables.") - logger.error("Please check your database settings.") - - raise RuntimeError("Something went wrong creating the database and tables.") - else: - logger.debug("Database and tables created successfully") - - -@contextmanager -def session_getter(): - try: - session = Session(engine) - yield session - except Exception as e: - print("Session rollback because of exception:", e) - session.rollback() - raise - finally: - session.close() - - -def get_session(): - with session_getter() as session: - yield session diff --git a/src/backend/langflow/database/models/flow_style.py b/src/backend/langflow/database/models/flow_style.py deleted file mode 100644 index fe53799fe1..0000000000 --- a/src/backend/langflow/database/models/flow_style.py +++ /dev/null @@ -1,33 +0,0 @@ -# Path: src/backend/langflow/database/models/flowstyle.py - -from langflow.database.models.base import SQLModelSerializable -from sqlmodel import Field, Relationship -from uuid import UUID, uuid4 -from typing import TYPE_CHECKING, Optional - -if TYPE_CHECKING: - from langflow.database.models.flow import Flow - - -class FlowStyleBase(SQLModelSerializable): - color: str - emoji: str - flow_id: UUID = Field(default=None, foreign_key="flow.id") - - -class FlowStyle(FlowStyleBase, table=True): - id: UUID = Field(default_factory=uuid4, primary_key=True, unique=True) - flow: "Flow" = Relationship(back_populates="style") - - -class FlowStyleUpdate(SQLModelSerializable): - color: Optional[str] = None - emoji: Optional[str] = None - - -class FlowStyleCreate(FlowStyleBase): - pass - - -class FlowStyleRead(FlowStyleBase): - id: UUID diff --git a/src/backend/langflow/database/models/user.py b/src/backend/langflow/database/models/user.py index 6e13f3e49f..2d6b9bbf7d 100644 --- a/src/backend/langflow/database/models/user.py +++ b/src/backend/langflow/database/models/user.py @@ -1,7 +1,10 @@ from datetime import datetime from sqlalchemy.orm import Session -from langflow.database.models.base import SQLModelSerializable, SQLModel +from langflow.services.database.models.base import ( + SQLModelSerializable, + SQLModel +) from sqlmodel import Field from uuid import UUID, uuid4 diff --git a/src/backend/langflow/graph/graph/base.py b/src/backend/langflow/graph/graph/base.py index 99b4e2b3d7..f0d3986cf1 100644 --- a/src/backend/langflow/graph/graph/base.py +++ b/src/backend/langflow/graph/graph/base.py @@ -1,7 +1,7 @@ from typing import Dict, Generator, List, Type, Union from langflow.graph.edge.base import Edge -from langflow.graph.graph.constants import VERTEX_TYPE_MAP +from langflow.graph.graph.constants import lazy_load_vertex_dict from 
langflow.graph.vertex.base import Vertex from langflow.graph.vertex.types import ( FileToolVertex, @@ -187,10 +187,12 @@ def _get_vertex_class(self, node_type: str, node_lc_type: str) -> Type[Vertex]: """Returns the node class based on the node type.""" if node_type in FILE_TOOLS: return FileToolVertex - if node_type in VERTEX_TYPE_MAP: - return VERTEX_TYPE_MAP[node_type] + if node_type in lazy_load_vertex_dict.VERTEX_TYPE_MAP: + return lazy_load_vertex_dict.VERTEX_TYPE_MAP[node_type] return ( - VERTEX_TYPE_MAP[node_lc_type] if node_lc_type in VERTEX_TYPE_MAP else Vertex + lazy_load_vertex_dict.VERTEX_TYPE_MAP[node_lc_type] + if node_lc_type in lazy_load_vertex_dict.VERTEX_TYPE_MAP + else Vertex ) def _build_vertices(self) -> List[Vertex]: diff --git a/src/backend/langflow/graph/graph/constants.py b/src/backend/langflow/graph/graph/constants.py index 5e5c3b7091..c9fea48b53 100644 --- a/src/backend/langflow/graph/graph/constants.py +++ b/src/backend/langflow/graph/graph/constants.py @@ -1,4 +1,3 @@ -from langflow.graph.vertex.base import Vertex from langflow.graph.vertex import types from langflow.interface.agents.base import agent_creator from langflow.interface.chains.base import chain_creator @@ -15,23 +14,45 @@ from langflow.interface.output_parsers.base import output_parser_creator from langflow.interface.retrievers.base import retriever_creator from langflow.interface.custom.base import custom_component_creator -from typing import Dict, Type +from langflow.utils.lazy_load import LazyLoadDictBase -VERTEX_TYPE_MAP: Dict[str, Type[Vertex]] = { - **{t: types.PromptVertex for t in prompt_creator.to_list()}, - **{t: types.AgentVertex for t in agent_creator.to_list()}, - **{t: types.ChainVertex for t in chain_creator.to_list()}, - **{t: types.ToolVertex for t in tool_creator.to_list()}, - **{t: types.ToolkitVertex for t in toolkits_creator.to_list()}, - **{t: types.WrapperVertex for t in wrapper_creator.to_list()}, - **{t: types.LLMVertex for t in llm_creator.to_list()}, - **{t: types.MemoryVertex for t in memory_creator.to_list()}, - **{t: types.EmbeddingVertex for t in embedding_creator.to_list()}, - **{t: types.VectorStoreVertex for t in vectorstore_creator.to_list()}, - **{t: types.DocumentLoaderVertex for t in documentloader_creator.to_list()}, - **{t: types.TextSplitterVertex for t in textsplitter_creator.to_list()}, - **{t: types.OutputParserVertex for t in output_parser_creator.to_list()}, - **{t: types.CustomComponentVertex for t in custom_component_creator.to_list()}, - **{t: types.RetrieverVertex for t in retriever_creator.to_list()}, -} +class VertexTypesDict(LazyLoadDictBase): + def __init__(self): + self._all_types_dict = None + + @property + def VERTEX_TYPE_MAP(self): + return self.all_types_dict + + def _build_dict(self): + langchain_types_dict = self.get_type_dict() + return { + **langchain_types_dict, + "Custom": ["Custom Tool", "Python Function"], + } + + def get_type_dict(self): + return { + **{t: types.PromptVertex for t in prompt_creator.to_list()}, + **{t: types.AgentVertex for t in agent_creator.to_list()}, + **{t: types.ChainVertex for t in chain_creator.to_list()}, + **{t: types.ToolVertex for t in tool_creator.to_list()}, + **{t: types.ToolkitVertex for t in toolkits_creator.to_list()}, + **{t: types.WrapperVertex for t in wrapper_creator.to_list()}, + **{t: types.LLMVertex for t in llm_creator.to_list()}, + **{t: types.MemoryVertex for t in memory_creator.to_list()}, + **{t: types.EmbeddingVertex for t in embedding_creator.to_list()}, + **{t: 
types.VectorStoreVertex for t in vectorstore_creator.to_list()}, + **{t: types.DocumentLoaderVertex for t in documentloader_creator.to_list()}, + **{t: types.TextSplitterVertex for t in textsplitter_creator.to_list()}, + **{t: types.OutputParserVertex for t in output_parser_creator.to_list()}, + **{ + t: types.CustomComponentVertex + for t in custom_component_creator.to_list() + }, + **{t: types.RetrieverVertex for t in retriever_creator.to_list()}, + } + + +lazy_load_vertex_dict = VertexTypesDict() diff --git a/src/backend/langflow/graph/vertex/base.py b/src/backend/langflow/graph/vertex/base.py index 2c749b85f9..ac7f72b4dc 100644 --- a/src/backend/langflow/graph/vertex/base.py +++ b/src/backend/langflow/graph/vertex/base.py @@ -1,5 +1,6 @@ +import ast from langflow.interface.initialize import loading -from langflow.interface.listing import ALL_TYPES_DICT +from langflow.interface.listing import lazy_load_dict from langflow.utils.constants import DIRECT_TYPES from langflow.utils.logger import logger from langflow.utils.util import sync_to_async @@ -61,7 +62,7 @@ def _parse_data(self) -> None: ) if self.base_type is None: - for base_type, value in ALL_TYPES_DICT.items(): + for base_type, value in lazy_load_dict.ALL_TYPES_DICT.items(): if self.vertex_type in value: self.base_type = base_type break @@ -100,7 +101,9 @@ def _build_params(self): params[param_key] = edge.source for key, value in template_dict.items(): - if key == "_type" or not value.get("show"): + # Skip _type and any value with show == False, except "code": + # even when the code field is hidden in the UI we still need its value + if key == "_type" or (not value.get("show") and key != "code"): continue # If the type is not transformable to a python base class # then we need to get the edge that connects to this node @@ -112,7 +115,14 @@ def _build_params(self): params[key] = file_path elif value.get("type") in DIRECT_TYPES and params.get(key) is None: + if value.get("type") == "code": + try: + params[key] = ast.literal_eval(value.get("value")) + except Exception as exc: + logger.debug(f"Error parsing code: {exc}") + params[key] = value.get("value") + else: + params[key] = value.get("value") - params[key] = value.get("value") if not value.get("required") and params.get(key) is None: if value.get("default"): @@ -259,4 +269,8 @@ def __hash__(self) -> int: def _built_object_repr(self): # Add a message with an emoji, stars for sucess, - return "Built sucessfully ✨" if self._built_object else "Failed to build 😵‍💫" + return ( + "Built successfully ✨" + if self._built_object is not None + else "Failed to build 😵‍💫" + ) diff --git a/src/backend/langflow/graph/vertex/types.py b/src/backend/langflow/graph/vertex/types.py index 5aee7b14cf..b7ac179835 100644 --- a/src/backend/langflow/graph/vertex/types.py +++ b/src/backend/langflow/graph/vertex/types.py @@ -226,7 +226,11 @@ def _built_object_repr(self): # so the prompt format doesn't break artifacts.pop("handle_keys", None) try: - template = self._built_object.format(**artifacts) + template = self._built_object.template + for key, value in artifacts.items(): + if value: + replace_key = "{" + key + "}" + template = template.replace(replace_key, value) return ( template if isinstance(template, str) diff --git a/src/backend/langflow/interface/agents/base.py b/src/backend/langflow/interface/agents/base.py index b272144bc8..ec8c42aba2 100644 --- a/src/backend/langflow/interface/agents/base.py +++ b/src/backend/langflow/interface/agents/base.py @@ -5,7 +5,8 @@ from langflow.custom.customs import
get_custom_nodes from langflow.interface.agents.custom import CUSTOM_AGENTS from langflow.interface.base import LangChainTypeCreator -from langflow.settings import settings +from langflow.services.utils import get_settings_manager + from langflow.template.frontend_node.agents import AgentFrontendNode from langflow.utils.logger import logger from langflow.utils.util import build_template_from_class, build_template_from_method @@ -53,13 +54,17 @@ def get_signature(self, name: str) -> Optional[Dict]: # Now this is a generator def to_list(self) -> List[str]: names = [] + settings_manager = get_settings_manager() for _, agent in self.type_to_loader_dict.items(): agent_name = ( agent.function_name() if hasattr(agent, "function_name") else agent.__name__ ) - if agent_name in settings.agents or settings.dev: + if ( + agent_name in settings_manager.settings.AGENTS + or settings_manager.settings.DEV + ): names.append(agent_name) return names diff --git a/src/backend/langflow/interface/base.py b/src/backend/langflow/interface/base.py index e6a28bf7dc..d1ed83b5a0 100644 --- a/src/backend/langflow/interface/base.py +++ b/src/backend/langflow/interface/base.py @@ -2,13 +2,14 @@ from typing import Any, Dict, List, Optional, Type, Union from langchain.chains.base import Chain from langchain.agents import AgentExecutor +from langflow.services.utils import get_settings_manager from pydantic import BaseModel from langflow.template.field.base import TemplateField from langflow.template.frontend_node.base import FrontendNode from langflow.template.template.base import Template from langflow.utils.logger import logger -from langflow.settings import settings + # Assuming necessary imports for Field, Template, and FrontendNode classes @@ -26,9 +27,12 @@ def frontend_node_class(self) -> Type[FrontendNode]: @property def docs_map(self) -> Dict[str, str]: """A dict with the name of the component as key and the documentation link as value.""" + settings_manager = get_settings_manager() if self.name_docs_dict is None: try: - type_settings = getattr(settings, self.type_name) + type_settings = getattr( + settings_manager.settings, self.type_name.upper() + ) self.name_docs_dict = { name: value_dict["documentation"] for name, value_dict in type_settings.items() diff --git a/src/backend/langflow/interface/chains/base.py b/src/backend/langflow/interface/chains/base.py index 67d31308fc..b906dbd25a 100644 --- a/src/backend/langflow/interface/chains/base.py +++ b/src/backend/langflow/interface/chains/base.py @@ -3,11 +3,13 @@ from langflow.custom.customs import get_custom_nodes from langflow.interface.base import LangChainTypeCreator from langflow.interface.importing.utils import import_class -from langflow.settings import settings +from langflow.services.utils import get_settings_manager + from langflow.template.frontend_node.chains import ChainFrontendNode from langflow.utils.logger import logger from langflow.utils.util import build_template_from_class, build_template_from_method from langchain import chains +from langchain_experimental.sql import SQLDatabaseChain # type: ignore # Assuming necessary imports for Field, Template, and FrontendNode classes @@ -29,18 +31,22 @@ def frontend_node_class(self) -> Type[ChainFrontendNode]: @property def type_to_loader_dict(self) -> Dict: if self.type_dict is None: + settings_manager = get_settings_manager() self.type_dict: dict[str, Any] = { chain_name: import_class(f"langchain.chains.{chain_name}") for chain_name in chains.__all__ } from langflow.interface.chains.custom import 
CUSTOM_CHAINS + self.type_dict["SQLDatabaseChain"] = SQLDatabaseChain + self.type_dict.update(CUSTOM_CHAINS) # Filter according to settings.chains self.type_dict = { name: chain for name, chain in self.type_dict.items() - if name in settings.chains or settings.dev + if name in settings_manager.settings.CHAINS + or settings_manager.settings.DEV } return self.type_dict diff --git a/src/backend/langflow/interface/custom/component.py b/src/backend/langflow/interface/custom/component.py index a9dc0f3234..d122eb6da4 100644 --- a/src/backend/langflow/interface/custom/component.py +++ b/src/backend/langflow/interface/custom/component.py @@ -1,5 +1,5 @@ import ast -from typing import Optional +from typing import Any, Optional from pydantic import BaseModel from fastapi import HTTPException @@ -63,10 +63,10 @@ def build_template_config(self, attributes) -> dict: elif "description" in item_name: template_config["description"] = ast.literal_eval(item_value) - elif "field_config" in item_name: - template_config["field_config"] = ast.literal_eval(item_value) + elif "beta" in item_name: + template_config["beta"] = ast.literal_eval(item_value) return template_config - def build(self): + def build(self, *args: Any, **kwargs: Any) -> Any: raise NotImplementedError diff --git a/src/backend/langflow/interface/custom/constants.py b/src/backend/langflow/interface/custom/constants.py index 0e747d0ca7..83cf4b463d 100644 --- a/src/backend/langflow/interface/custom/constants.py +++ b/src/backend/langflow/interface/custom/constants.py @@ -7,6 +7,7 @@ from langchain.text_splitter import TextSplitter from langchain.tools import Tool from langchain.vectorstores.base import VectorStore +from langchain.schema import BaseOutputParser LANGCHAIN_BASE_TYPES = { @@ -20,6 +21,7 @@ "VectorStore": VectorStore, "Embeddings": Embeddings, "BaseRetriever": BaseRetriever, + "BaseOutputParser": BaseOutputParser, } # Langchain base types plus Python base types diff --git a/src/backend/langflow/interface/custom/custom_component.py b/src/backend/langflow/interface/custom/custom_component.py index 4d65070bf6..5388f40d87 100644 --- a/src/backend/langflow/interface/custom/custom_component.py +++ b/src/backend/langflow/interface/custom/custom_component.py @@ -3,12 +3,14 @@ from langflow.interface.custom.constants import CUSTOM_COMPONENT_SUPPORTED_TYPES from langflow.interface.custom.component import Component from langflow.interface.custom.directory_reader import DirectoryReader +from langflow.services.utils import get_db_manager from langflow.utils import validate -from langflow.database.base import session_getter -from langflow.database.models.flow import Flow +from langflow.services.database.utils import session_getter +from langflow.services.database.models.flow import Flow from pydantic import Extra +import yaml class CustomComponent(Component, extra=Extra.allow): @@ -24,6 +26,10 @@ def __init__(self, **data): super().__init__(**data) def custom_repr(self): + if isinstance(self.repr_value, dict): + return yaml.dump(self.repr_value) + if isinstance(self.repr_value, str): + return self.repr_value return str(self.repr_value) def build_config(self): @@ -154,7 +160,8 @@ def load_flow(self, flow_id: str, tweaks: Optional[dict] = None) -> Any: from langflow.processing.process import build_sorted_vertices_with_caching from langflow.processing.process import process_tweaks - with session_getter() as session: + db_manager = get_db_manager() + with session_getter(db_manager) as session: graph_data = flow.data if (flow := session.get(Flow, 
flow_id)) else None if not graph_data: raise ValueError(f"Flow {flow_id} not found") @@ -164,7 +171,8 @@ def load_flow(self, flow_id: str, tweaks: Optional[dict] = None) -> Any: def list_flows(self, *, get_session: Optional[Callable] = None) -> List[Flow]: get_session = get_session or session_getter - with get_session() as session: + db_manager = get_db_manager() + with get_session(db_manager) as session: flows = session.query(Flow).all() return flows @@ -177,8 +185,8 @@ def get_flow( get_session: Optional[Callable] = None, ) -> Flow: get_session = get_session or session_getter - - with get_session() as session: + db_manager = get_db_manager() + with get_session(db_manager) as session: if flow_id: flow = session.query(Flow).get(flow_id) elif flow_name: @@ -190,5 +198,5 @@ def get_flow( raise ValueError(f"Flow {flow_name or flow_id} not found") return self.load_flow(flow.id, tweaks) - def build(self): + def build(self, *args: Any, **kwargs: Any) -> Any: raise NotImplementedError diff --git a/src/backend/langflow/interface/custom/directory_reader.py b/src/backend/langflow/interface/custom/directory_reader.py index 3357d164f9..57bb3ca959 100644 --- a/src/backend/langflow/interface/custom/directory_reader.py +++ b/src/backend/langflow/interface/custom/directory_reader.py @@ -1,6 +1,7 @@ import os import ast import zlib +from langflow.utils.logger import logger class CustomComponentPathValueError(ValueError): @@ -74,8 +75,11 @@ def filter_loaded_components(self, data: dict, with_errors: bool) -> dict: } for menu in data["menu"] ] - filtred = [menu for menu in items if menu["components"]] - return {"menu": filtred} + filtered = [menu for menu in items if menu["components"]] + logger.debug( + f'Filtered components {"with errors" if with_errors else ""}: {filtered}' + ) + return {"menu": filtered} def validate_code(self, file_content): """ @@ -116,7 +120,7 @@ def get_files(self): file_list.extend( os.path.join(root, filename) for filename in files - if filename.endswith(".py") + if filename.endswith(".py") and not filename.startswith("__") ) return file_list @@ -213,27 +217,47 @@ def build_component_menu_list(self, file_paths): from the .py files in the directory. 
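(Editorial example of the returned shape, inferred from the code below; all values are hypothetical: {"menu": [{"name": "chains", "path": ".../components/chains", "components": [{"name": "CustomComponent", "output_types": ["Prompt Runner"], "file": "prompt_runner.py", "code": "...", "error": ""}]}]}.)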
""" response = {"menu": []} + logger.debug( + "-------------------- Building component menu list --------------------" + ) for file_path in file_paths: menu_name = os.path.basename(os.path.dirname(file_path)) + logger.debug(f"Menu name: {menu_name}") filename = os.path.basename(file_path) validation_result, result_content = self.process_file(file_path) + logger.debug(f"Validation result: {validation_result}") menu_result = self.find_menu(response, menu_name) or { "name": menu_name, "path": os.path.dirname(file_path), "components": [], } + component_name = filename.split(".")[0] + # This is the name of the file which will be displayed in the UI + # We need to change it from snake_case to CamelCase + + # first check if it's already CamelCase + if "_" in component_name: + component_name_camelcase = " ".join( + word.title() for word in component_name.split("_") + ) + else: + component_name_camelcase = component_name component_info = { - "name": filename.split(".")[0], + "name": "CustomComponent", + "output_types": [component_name_camelcase], "file": filename, "code": result_content if validation_result else "", "error": "" if validation_result else result_content, } menu_result["components"].append(component_info) + logger.debug(f"Component info: {component_info}") if menu_result not in response["menu"]: response["menu"].append(menu_result) - + logger.debug( + "-------------------- Component menu list built --------------------" + ) return response diff --git a/src/backend/langflow/interface/document_loaders/base.py b/src/backend/langflow/interface/document_loaders/base.py index 5219fbd13b..db0832ff3e 100644 --- a/src/backend/langflow/interface/document_loaders/base.py +++ b/src/backend/langflow/interface/document_loaders/base.py @@ -1,9 +1,10 @@ from typing import Dict, List, Optional, Type from langflow.interface.base import LangChainTypeCreator +from langflow.services.utils import get_settings_manager from langflow.template.frontend_node.documentloaders import DocumentLoaderFrontNode from langflow.interface.custom_lists import documentloaders_type_to_cls_dict -from langflow.settings import settings + from langflow.utils.logger import logger from langflow.utils.util import build_template_from_class @@ -30,10 +31,12 @@ def get_signature(self, name: str) -> Optional[Dict]: return None def to_list(self) -> List[str]: + settings_manager = get_settings_manager() return [ documentloader.__name__ for documentloader in self.type_to_loader_dict.values() - if documentloader.__name__ in settings.documentloaders or settings.dev + if documentloader.__name__ in settings_manager.settings.DOCUMENTLOADERS + or settings_manager.settings.DEV ] diff --git a/src/backend/langflow/interface/embeddings/base.py b/src/backend/langflow/interface/embeddings/base.py index 1dfa05a994..169985d378 100644 --- a/src/backend/langflow/interface/embeddings/base.py +++ b/src/backend/langflow/interface/embeddings/base.py @@ -2,7 +2,8 @@ from langflow.interface.base import LangChainTypeCreator from langflow.interface.custom_lists import embedding_type_to_cls_dict -from langflow.settings import settings +from langflow.services.utils import get_settings_manager + from langflow.template.frontend_node.base import FrontendNode from langflow.template.frontend_node.embeddings import EmbeddingFrontendNode from langflow.utils.logger import logger @@ -32,10 +33,12 @@ def get_signature(self, name: str) -> Optional[Dict]: return None def to_list(self) -> List[str]: + settings_manager = get_settings_manager() return [ embedding.__name__ for 
embedding in self.type_to_loader_dict.values() - if embedding.__name__ in settings.embeddings or settings.dev + if embedding.__name__ in settings_manager.settings.EMBEDDINGS + or settings_manager.settings.DEV ] diff --git a/src/backend/langflow/interface/importing/utils.py b/src/backend/langflow/interface/importing/utils.py index 0acb2cff50..d07222dd18 100644 --- a/src/backend/langflow/interface/importing/utils.py +++ b/src/backend/langflow/interface/importing/utils.py @@ -61,9 +61,7 @@ def import_by_type(_type: str, name: str) -> Any: def import_custom_component(custom_component: str) -> CustomComponent: """Import custom component from custom component name""" - return import_class( - f"langflow.interface.custom.custom_component.{custom_component}" - ) + return import_class("langflow.interface.custom.custom_component.CustomComponent") def import_output_parser(output_parser: str) -> Any: diff --git a/src/backend/langflow/interface/initialize/loading.py b/src/backend/langflow/interface/initialize/loading.py index f52b2e56c5..e72e5091b0 100644 --- a/src/backend/langflow/interface/initialize/loading.py +++ b/src/backend/langflow/interface/initialize/loading.py @@ -6,7 +6,11 @@ from langchain.agents.agent_toolkits.base import BaseToolkit from langchain.agents.tools import BaseTool from langflow.interface.initialize.llm import initialize_vertexai -from langflow.interface.initialize.utils import handle_format_kwargs, handle_node_type +from langflow.interface.initialize.utils import ( + handle_format_kwargs, + handle_node_type, + handle_partial_variables, +) from langflow.interface.initialize.vector_store import vecstore_initializer @@ -29,6 +33,7 @@ from langchain.chains.base import Chain from langchain.vectorstores.base import VectorStore from langchain.document_loaders.base import BaseLoader +from langflow.utils.logger import logger def instantiate_class(node_type: str, base_type: str, params: Dict) -> Any: @@ -40,7 +45,7 @@ def instantiate_class(node_type: str, base_type: str, params: Dict) -> Any: if hasattr(custom_node, "initialize"): return custom_node.initialize(**params) return custom_node(**params) - + logger.debug(f"Instantiating {node_type} of type {base_type}") class_object = import_by_type(_type=base_type, name=node_type) return instantiate_based_on_type(class_object, base_type, node_type, params) @@ -217,6 +222,9 @@ def instantiate_agent(node_type, class_object: Type[agent_module.Agent], params: def instantiate_prompt(node_type, class_object, params: Dict): params, prompt = handle_node_type(node_type, class_object, params) format_kwargs = handle_format_kwargs(prompt, params) + # Now we'll use prompt.partial() to pre-fill the variables we already have + if format_kwargs: + prompt = handle_partial_variables(prompt, format_kwargs) return prompt, format_kwargs diff --git a/src/backend/langflow/interface/initialize/utils.py b/src/backend/langflow/interface/initialize/utils.py index 31fbc6d8b5..976d8906c1 100644 --- a/src/backend/langflow/interface/initialize/utils.py +++ b/src/backend/langflow/interface/initialize/utils.py @@ -44,6 +44,16 @@ def handle_format_kwargs(prompt, params: Dict): return format_kwargs +def handle_partial_variables(prompt, format_kwargs: Dict): + partial_variables = format_kwargs.copy() + partial_variables = { + key: value for key, value in partial_variables.items() if value + } + # Remove handle_keys otherwise LangChain raises an error + partial_variables.pop("handle_keys", None) + return prompt.partial(**partial_variables) + + def handle_variable(params: Dict, input_variable: str,
format_kwargs: Dict): variable = params[input_variable] if isinstance(variable, str): diff --git a/src/backend/langflow/interface/initialize/vector_store.py b/src/backend/langflow/interface/initialize/vector_store.py index d4bdb01558..8330d76112 100644 --- a/src/backend/langflow/interface/initialize/vector_store.py +++ b/src/backend/langflow/interface/initialize/vector_store.py @@ -170,6 +170,26 @@ def initialize_pinecone(class_object: Type[Pinecone], params: dict): def initialize_chroma(class_object: Type[Chroma], params: dict): """Initialize a ChromaDB object from the params""" + if ( # type: ignore + "chroma_server_host" in params or "chroma_server_http_port" in params + ): + import chromadb # type: ignore + + settings_params = { + key: params[key] + for key, value_ in params.items() + if key.startswith("chroma_server_") and value_ + } + chroma_settings = chromadb.config.Settings(**settings_params) + params["client_settings"] = chroma_settings + else: + # remove all chroma_server_ keys from params + params = { + key: value + for key, value in params.items() + if not key.startswith("chroma_server_") + } + persist = params.pop("persist", False) if not docs_in_params(params): params.pop("documents", None) diff --git a/src/backend/langflow/interface/listing.py b/src/backend/langflow/interface/listing.py index fe3090f658..1cab1efbcd 100644 --- a/src/backend/langflow/interface/listing.py +++ b/src/backend/langflow/interface/listing.py @@ -14,34 +14,43 @@ from langflow.interface.output_parsers.base import output_parser_creator from langflow.interface.retrievers.base import retriever_creator from langflow.interface.custom.base import custom_component_creator +from langflow.utils.lazy_load import LazyLoadDictBase -def get_type_dict(): - return { - "agents": agent_creator.to_list(), - "prompts": prompt_creator.to_list(), - "llms": llm_creator.to_list(), - "tools": tool_creator.to_list(), - "chains": chain_creator.to_list(), - "memory": memory_creator.to_list(), - "toolkits": toolkits_creator.to_list(), - "wrappers": wrapper_creator.to_list(), - "documentLoaders": documentloader_creator.to_list(), - "vectorStore": vectorstore_creator.to_list(), - "embeddings": embedding_creator.to_list(), - "textSplitters": textsplitter_creator.to_list(), - "utilities": utility_creator.to_list(), - "outputParsers": output_parser_creator.to_list(), - "retrievers": retriever_creator.to_list(), - "custom_components": custom_component_creator.to_list(), - } +class AllTypesDict(LazyLoadDictBase): + def __init__(self): + self._all_types_dict = None + @property + def ALL_TYPES_DICT(self): + return self.all_types_dict -LANGCHAIN_TYPES_DICT = get_type_dict() + def _build_dict(self): + langchain_types_dict = self.get_type_dict() + return { + **langchain_types_dict, + "Custom": ["Custom Tool", "Python Function"], + } -# Now we'll build a dict with Langchain types and ours + def get_type_dict(self): + return { + "agents": agent_creator.to_list(), + "prompts": prompt_creator.to_list(), + "llms": llm_creator.to_list(), + "tools": tool_creator.to_list(), + "chains": chain_creator.to_list(), + "memory": memory_creator.to_list(), + "toolkits": toolkits_creator.to_list(), + "wrappers": wrapper_creator.to_list(), + "documentLoaders": documentloader_creator.to_list(), + "vectorStore": vectorstore_creator.to_list(), + "embeddings": embedding_creator.to_list(), + "textSplitters": textsplitter_creator.to_list(), + "utilities": utility_creator.to_list(), + "outputParsers": output_parser_creator.to_list(), + "retrievers": 
retriever_creator.to_list(), + "custom_components": custom_component_creator.to_list(), + } -ALL_TYPES_DICT = { - **LANGCHAIN_TYPES_DICT, - "Custom": ["Custom Tool", "Python Function"], -} + +lazy_load_dict = AllTypesDict() diff --git a/src/backend/langflow/interface/llms/base.py b/src/backend/langflow/interface/llms/base.py index 66e153880d..f562b99ed4 100644 --- a/src/backend/langflow/interface/llms/base.py +++ b/src/backend/langflow/interface/llms/base.py @@ -2,7 +2,8 @@ from langflow.interface.base import LangChainTypeCreator from langflow.interface.custom_lists import llm_type_to_cls_dict -from langflow.settings import settings +from langflow.services.utils import get_settings_manager + from langflow.template.frontend_node.llms import LLMFrontendNode from langflow.utils.logger import logger from langflow.utils.util import build_template_from_class @@ -33,10 +34,12 @@ def get_signature(self, name: str) -> Optional[Dict]: return None def to_list(self) -> List[str]: + settings_manager = get_settings_manager() return [ llm.__name__ for llm in self.type_to_loader_dict.values() - if llm.__name__ in settings.llms or settings.dev + if llm.__name__ in settings_manager.settings.LLMS + or settings_manager.settings.DEV ] diff --git a/src/backend/langflow/interface/memories/base.py b/src/backend/langflow/interface/memories/base.py index 0f97a02fe4..70665602c1 100644 --- a/src/backend/langflow/interface/memories/base.py +++ b/src/backend/langflow/interface/memories/base.py @@ -2,7 +2,8 @@ from langflow.interface.base import LangChainTypeCreator from langflow.interface.custom_lists import memory_type_to_cls_dict -from langflow.settings import settings +from langflow.services.utils import get_settings_manager + from langflow.template.frontend_node.base import FrontendNode from langflow.template.frontend_node.memories import MemoryFrontendNode from langflow.utils.logger import logger @@ -48,10 +49,12 @@ def get_signature(self, name: str) -> Optional[Dict]: return None def to_list(self) -> List[str]: + settings_manager = get_settings_manager() return [ memory.__name__ for memory in self.type_to_loader_dict.values() - if memory.__name__ in settings.memories or settings.dev + if memory.__name__ in settings_manager.settings.MEMORIES + or settings_manager.settings.DEV ] diff --git a/src/backend/langflow/interface/output_parsers/base.py b/src/backend/langflow/interface/output_parsers/base.py index 79cbdd98cc..256b521e19 100644 --- a/src/backend/langflow/interface/output_parsers/base.py +++ b/src/backend/langflow/interface/output_parsers/base.py @@ -4,7 +4,8 @@ from langflow.interface.base import LangChainTypeCreator from langflow.interface.importing.utils import import_class -from langflow.settings import settings +from langflow.services.utils import get_settings_manager + from langflow.template.frontend_node.output_parsers import OutputParserFrontendNode from langflow.utils.logger import logger from langflow.utils.util import build_template_from_class, build_template_from_method @@ -23,6 +24,7 @@ def frontend_node_class(self) -> Type[OutputParserFrontendNode]: @property def type_to_loader_dict(self) -> Dict: if self.type_dict is None: + settings_manager = get_settings_manager() self.type_dict = { output_parser_name: import_class( f"langchain.output_parsers.{output_parser_name}" @@ -33,7 +35,8 @@ def type_to_loader_dict(self) -> Dict: self.type_dict = { name: output_parser for name, output_parser in self.type_dict.items() - if name in settings.output_parsers or settings.dev + if name in 
settings_manager.settings.OUTPUT_PARSERS + or settings_manager.settings.DEV } return self.type_dict diff --git a/src/backend/langflow/interface/prompts/base.py b/src/backend/langflow/interface/prompts/base.py index 39bd94c5b9..5aa41dfb28 100644 --- a/src/backend/langflow/interface/prompts/base.py +++ b/src/backend/langflow/interface/prompts/base.py @@ -5,7 +5,8 @@ from langflow.custom.customs import get_custom_nodes from langflow.interface.base import LangChainTypeCreator from langflow.interface.importing.utils import import_class -from langflow.settings import settings +from langflow.services.utils import get_settings_manager + from langflow.template.frontend_node.prompts import PromptFrontendNode from langflow.utils.logger import logger from langflow.utils.util import build_template_from_class @@ -20,6 +21,7 @@ def frontend_node_class(self) -> Type[PromptFrontendNode]: @property def type_to_loader_dict(self) -> Dict: + settings_manager = get_settings_manager() if self.type_dict is None: self.type_dict = { prompt_name: import_class(f"langchain.prompts.{prompt_name}") @@ -34,7 +36,8 @@ def type_to_loader_dict(self) -> Dict: self.type_dict = { name: prompt for name, prompt in self.type_dict.items() - if name in settings.prompts or settings.dev + if name in settings_manager.settings.PROMPTS + or settings_manager.settings.DEV } return self.type_dict diff --git a/src/backend/langflow/interface/retrievers/base.py b/src/backend/langflow/interface/retrievers/base.py index dc60566564..db1cfd1654 100644 --- a/src/backend/langflow/interface/retrievers/base.py +++ b/src/backend/langflow/interface/retrievers/base.py @@ -4,7 +4,8 @@ from langflow.interface.base import LangChainTypeCreator from langflow.interface.importing.utils import import_class -from langflow.settings import settings +from langflow.services.utils import get_settings_manager + from langflow.template.frontend_node.retrievers import RetrieverFrontendNode from langflow.utils.logger import logger from langflow.utils.util import build_template_from_method, build_template_from_class @@ -48,10 +49,12 @@ def get_signature(self, name: str) -> Optional[Dict]: return None def to_list(self) -> List[str]: + settings_manager = get_settings_manager() return [ retriever for retriever in self.type_to_loader_dict.keys() - if retriever in settings.retrievers or settings.dev + if retriever in settings_manager.settings.RETRIEVERS + or settings_manager.settings.DEV ] diff --git a/src/backend/langflow/interface/run.py b/src/backend/langflow/interface/run.py index 97f47334ed..cb0573bf7c 100644 --- a/src/backend/langflow/interface/run.py +++ b/src/backend/langflow/interface/run.py @@ -1,4 +1,4 @@ -from langflow.cache.utils import memoize_dict +from langflow.services.cache.utils import memoize_dict from langflow.graph import Graph from langflow.utils.logger import logger diff --git a/src/backend/langflow/interface/text_splitters/base.py b/src/backend/langflow/interface/text_splitters/base.py index 203f300869..87b778c4c0 100644 --- a/src/backend/langflow/interface/text_splitters/base.py +++ b/src/backend/langflow/interface/text_splitters/base.py @@ -1,9 +1,10 @@ from typing import Dict, List, Optional, Type from langflow.interface.base import LangChainTypeCreator +from langflow.services.utils import get_settings_manager from langflow.template.frontend_node.textsplitters import TextSplittersFrontendNode from langflow.interface.custom_lists import textsplitter_type_to_cls_dict -from langflow.settings import settings + from langflow.utils.logger import logger 
 from langflow.utils.util import build_template_from_class
@@ -30,10 +31,12 @@ def get_signature(self, name: str) -> Optional[Dict]:
         return None
 
     def to_list(self) -> List[str]:
+        settings_manager = get_settings_manager()
         return [
             textsplitter.__name__
             for textsplitter in self.type_to_loader_dict.values()
-            if textsplitter.__name__ in settings.textsplitters or settings.dev
+            if textsplitter.__name__ in settings_manager.settings.TEXTSPLITTERS
+            or settings_manager.settings.DEV
         ]
diff --git a/src/backend/langflow/interface/toolkits/base.py b/src/backend/langflow/interface/toolkits/base.py
index be2345c02e..c13ffdbd9e 100644
--- a/src/backend/langflow/interface/toolkits/base.py
+++ b/src/backend/langflow/interface/toolkits/base.py
@@ -4,7 +4,8 @@
 
 from langflow.interface.base import LangChainTypeCreator
 from langflow.interface.importing.utils import import_class, import_module
-from langflow.settings import settings
+from langflow.services.utils import get_settings_manager
+
 from langflow.utils.logger import logger
 from langflow.utils.util import build_template_from_class
@@ -29,13 +30,15 @@ class ToolkitCreator(LangChainTypeCreator):
     @property
     def type_to_loader_dict(self) -> Dict:
         if self.type_dict is None:
+            settings_manager = get_settings_manager()
             self.type_dict = {
                 toolkit_name: import_class(
                     f"langchain.agents.agent_toolkits.{toolkit_name}"
                 )
                 # if toolkit_name is not lower case it is a class
                 for toolkit_name in agent_toolkits.__all__
-                if not toolkit_name.islower() and toolkit_name in settings.toolkits
+                if not toolkit_name.islower()
+                and toolkit_name in settings_manager.settings.TOOLKITS
             }
 
         return self.type_dict
diff --git a/src/backend/langflow/interface/tools/base.py b/src/backend/langflow/interface/tools/base.py
index f8965134d7..1dbc9a6ed7 100644
--- a/src/backend/langflow/interface/tools/base.py
+++ b/src/backend/langflow/interface/tools/base.py
@@ -15,7 +15,8 @@
     OTHER_TOOLS,
 )
 from langflow.interface.tools.util import get_tool_params
-from langflow.settings import settings
+from langflow.services.utils import get_settings_manager
+
 from langflow.template.field.base import TemplateField
 from langflow.template.template.base import Template
 from langflow.utils import util
@@ -66,6 +67,7 @@ class ToolCreator(LangChainTypeCreator):
 
     @property
     def type_to_loader_dict(self) -> Dict:
+        settings_manager = get_settings_manager()
         if self.tools_dict is None:
             all_tools = {}
 
@@ -74,7 +76,10 @@ def type_to_loader_dict(self) -> Dict:
 
                 tool_name = tool_params.get("name") or tool
 
-                if tool_name in settings.tools or settings.dev:
+                if (
+                    tool_name in settings_manager.settings.TOOLS
+                    or settings_manager.settings.DEV
+                ):
                     if tool_name == "JsonSpec":
                         tool_params["path"] = tool_params.pop("dict_")  # type: ignore
                     all_tools[tool_name] = {
diff --git a/src/backend/langflow/interface/types.py b/src/backend/langflow/interface/types.py
index 46fa781d69..76dc144a02 100644
--- a/src/backend/langflow/interface/types.py
+++ b/src/backend/langflow/interface/types.py
@@ -1,4 +1,7 @@
+import ast
+import contextlib
 from typing import Any
+from langflow.api.utils import merge_nested_dicts_with_renaming
 from langflow.interface.agents.base import agent_creator
 from langflow.interface.chains.base import chain_creator
 from langflow.interface.custom.constants import CUSTOM_COMPONENT_SUPPORTED_TYPES
@@ -28,7 +31,6 @@
 from langflow.interface.custom.directory_reader import DirectoryReader
 from langflow.utils.logger import logger
 from langflow.utils.util import get_base_classes
-from langflow.api.utils import merge_nested_dicts
 
 import re
 import warnings
@@ -145,7 +147,7 @@ def add_code_field(template, raw_code, field_config):
         "dynamic": True,
         "required": True,
         "placeholder": "",
-        "show": True,
+        "show": field_config.pop("show", True),
         "multiline": True,
         "value": raw_code,
         "password": False,
@@ -186,7 +188,7 @@ def build_frontend_node(custom_component: CustomComponent):
         return None
 
 
-def update_display_name_and_description(frontend_node, template_config):
+def update_attributes(frontend_node, template_config):
     """Update the display name and description of a frontend node"""
     if "display_name" in template_config:
         frontend_node["display_name"] = template_config["display_name"]
@@ -194,6 +196,9 @@ def update_display_name_and_description(frontend_node, template_config):
     if "description" in template_config:
         frontend_node["description"] = template_config["description"]
 
+    if "beta" in template_config:
+        frontend_node["beta"] = template_config["beta"]
+
 
 def build_field_config(custom_component: CustomComponent):
     """Build the field configuration for a custom component"""
@@ -247,6 +252,8 @@ def get_field_properties(extra_field):
     if not field_required:
         field_type = extract_type_from_optional(field_type)
 
+    with contextlib.suppress(Exception):
+        field_value = ast.literal_eval(field_value)
     return field_name, field_type, field_value, field_required
@@ -274,28 +281,30 @@ def add_base_classes(frontend_node, return_type):
 
 def build_langchain_template_custom_component(custom_component: CustomComponent):
     """Build a custom component template for the langchain"""
+    logger.debug("Building custom component template")
     frontend_node = build_frontend_node(custom_component)
     if frontend_node is None:
         return None
-
+    logger.debug("Built base frontend node")
     template_config = custom_component.build_template_config
-    update_display_name_and_description(frontend_node, template_config)
-
+    update_attributes(frontend_node, template_config)
+    logger.debug("Updated attributes")
     field_config = build_field_config(custom_component)
+    logger.debug("Built field config")
     add_extra_fields(
         frontend_node, field_config, custom_component.get_function_entrypoint_args
     )
-
+    logger.debug("Added extra fields")
     frontend_node = add_code_field(
         frontend_node, custom_component.code, field_config.get("code", {})
    )
-
+    logger.debug("Added code field")
     add_base_classes(
         frontend_node, custom_component.get_function_entrypoint_return_type
     )
-
+    logger.debug("Added base classes")
     return frontend_node
@@ -306,7 +315,7 @@ def load_files_from_path(path: str):
     return reader.get_files()
 
 
-def build_and_validate_all_files(reader, file_list):
+def build_and_validate_all_files(reader: DirectoryReader, file_list):
     """Build and validate all files"""
     data = reader.build_component_menu_list(file_list)
@@ -319,31 +328,53 @@
 def build_valid_menu(valid_components):
     """Build the valid menu"""
     valid_menu = {}
+    logger.debug("------------------- VALID COMPONENTS -------------------")
     for menu_item in valid_components["menu"]:
         menu_name = menu_item["name"]
         valid_menu[menu_name] = {}
         for component in menu_item["components"]:
+            logger.debug(f"Building component: {component}")
             try:
                 component_name = component["name"]
                 component_code = component["code"]
+                component_output_types = component["output_types"]
 
                 component_extractor = CustomComponent(code=component_code)
                 component_extractor.is_check_valid()
+
                 component_template = build_langchain_template_custom_component(
                     component_extractor
                 )
+                component_template["output_types"] = component_output_types
+
+                if len(component_output_types) == 1:
+                    component_name = component_output_types[0]
+                else:
+                    file_name = component.get("file").split(".")[0]
+                    if "_" in file_name:
+                        # Turn the snake_case .py file name into CamelCase
+                        component_name = "".join(
+                            [word.capitalize() for word in file_name.split("_")]
+                        )
+                    else:
+                        component_name = file_name
 
                 valid_menu[menu_name][component_name] = component_template
+                logger.debug(f"Added {component_name} to valid menu {menu_name}")
             except Exception as exc:
-                logger.error(f"Error while building custom component: {exc}")
+                logger.error(
+                    f"Error loading component: {component.get('output_types')}"
+                )
+                logger.exception(
+                    f"Error while building custom component "
+                    f"{component.get('output_types')}: {exc}"
+                )
 
     return valid_menu
 
 
 def build_invalid_menu(invalid_components):
     """Build the invalid menu"""
+    if invalid_components.get("menu"):
+        logger.debug("------------------- INVALID COMPONENTS -------------------")
     invalid_menu = {}
     for menu_item in invalid_components["menu"]:
         menu_name = menu_item["name"]
@@ -364,12 +395,16 @@ def build_invalid_menu(invalid_components):
             )
 
             component_template["error"] = component.get("error", None)
+            logger.debug(component)
+            logger.debug(f"Component Path: {component.get('path', None)}")
+            logger.debug(f"Component Error: {component.get('error', None)}")
             component_template.get("template").get("code")["value"] = component_code
 
             invalid_menu[menu_name][component_name] = component_template
+            logger.debug(f"Added {component_name} to invalid menu {menu_name}")
         except Exception as exc:
-            logger.error(
+            logger.exception(
                 f"Error while creating custom component [{component_name}]: {str(exc)}"
             )
@@ -388,4 +423,4 @@ def build_langchain_custom_component_list_from_path(path: str):
     valid_menu = build_valid_menu(valid_components)
     invalid_menu = build_invalid_menu(invalid_components)
 
-    return merge_nested_dicts(valid_menu, invalid_menu)
+    return merge_nested_dicts_with_renaming(valid_menu, invalid_menu)
diff --git a/src/backend/langflow/interface/utilities/base.py b/src/backend/langflow/interface/utilities/base.py
index 6c12b0186f..eb8cd60af3 100644
--- a/src/backend/langflow/interface/utilities/base.py
+++ b/src/backend/langflow/interface/utilities/base.py
@@ -5,7 +5,8 @@
 from langflow.custom.customs import get_custom_nodes
 from langflow.interface.base import LangChainTypeCreator
 from langflow.interface.importing.utils import import_class
-from langflow.settings import settings
+from langflow.services.utils import get_settings_manager
+
 from langflow.template.frontend_node.utilities import UtilitiesFrontendNode
 from langflow.utils.logger import logger
 from langflow.utils.util import build_template_from_class
@@ -26,6 +27,7 @@ def type_to_loader_dict(self) -> Dict:
         from the langchain.chains module and filtering them according
         to the settings.utilities list.
         """
         if self.type_dict is None:
+            settings_manager = get_settings_manager()
             self.type_dict = {
                 utility_name: import_class(f"langchain.utilities.{utility_name}")
                 for utility_name in utilities.__all__
@@ -35,7 +37,8 @@ def type_to_loader_dict(self) -> Dict:
             self.type_dict = {
                 name: utility
                 for name, utility in self.type_dict.items()
-                if name in settings.utilities or settings.dev
+                if name in settings_manager.settings.UTILITIES
+                or settings_manager.settings.DEV
             }
 
         return self.type_dict
diff --git a/src/backend/langflow/interface/utils.py b/src/backend/langflow/interface/utils.py
index 9203915cfd..1fddbf80f7 100644
--- a/src/backend/langflow/interface/utils.py
+++ b/src/backend/langflow/interface/utils.py
@@ -9,7 +9,8 @@
 from langchain.base_language import BaseLanguageModel
 from PIL.Image import Image
 
 from langflow.utils.logger import logger
-from langflow.chat.config import ChatConfig
+from langflow.services.chat.config import ChatConfig
+from langflow.services.utils import get_settings_manager
 
 
 def load_file_into_dict(file_path: str) -> dict:
@@ -63,24 +64,21 @@ def extract_input_variables_from_prompt(prompt: str) -> list[str]:
 
 def setup_llm_caching():
     """Setup LLM caching."""
-
-    from langflow.settings import settings
-
+    settings_manager = get_settings_manager()
     try:
-        set_langchain_cache(settings)
+        set_langchain_cache(settings_manager.settings)
     except ImportError:
-        logger.warning(f"Could not import {settings.cache}. ")
+        logger.warning(f"Could not import {settings_manager.settings.CACHE}. ")
     except Exception as exc:
         logger.warning(f"Could not setup LLM caching. Error: {exc}")
 
 
-# TODO Rename this here and in `setup_llm_caching`
 def set_langchain_cache(settings):
     import langchain
     from langflow.interface.importing.utils import import_class
 
     cache_type = os.getenv("LANGFLOW_LANGCHAIN_CACHE")
-    cache_class = import_class(f"langchain.cache.{cache_type or settings.cache}")
+    cache_class = import_class(f"langchain.cache.{cache_type or settings.CACHE}")
 
     logger.debug(f"Setting up LLM caching with {cache_class.__name__}")
     langchain.llm_cache = cache_class()
diff --git a/src/backend/langflow/interface/vector_store/base.py b/src/backend/langflow/interface/vector_store/base.py
index 7ec1e0f5b8..4b8ca2b641 100644
--- a/src/backend/langflow/interface/vector_store/base.py
+++ b/src/backend/langflow/interface/vector_store/base.py
@@ -4,7 +4,8 @@
 
 from langflow.interface.base import LangChainTypeCreator
 from langflow.interface.importing.utils import import_class
-from langflow.settings import settings
+from langflow.services.utils import get_settings_manager
+
 from langflow.template.frontend_node.vectorstores import VectorStoreFrontendNode
 from langflow.utils.logger import logger
 from langflow.utils.util import build_template_from_method
@@ -43,10 +44,12 @@ def get_signature(self, name: str) -> Optional[Dict]:
         return None
 
     def to_list(self) -> List[str]:
+        settings_manager = get_settings_manager()
         return [
             vectorstore
             for vectorstore in self.type_to_loader_dict.keys()
-            if vectorstore in settings.vectorstores or settings.dev
+            if vectorstore in settings_manager.settings.VECTORSTORES
+            or settings_manager.settings.DEV
         ]
diff --git a/src/backend/langflow/main.py b/src/backend/langflow/main.py
index fed302603c..e9e3dc3a1b 100644
--- a/src/backend/langflow/main.py
+++ b/src/backend/langflow/main.py
@@ -7,13 +7,16 @@
 
 from langflow.api import router
 from langflow.routers import login, users, health
-from langflow.database.base import create_db_and_tables
+
 from langflow.interface.utils import setup_llm_caching
+from langflow.services.database.utils import initialize_database
+from langflow.services.manager import initialize_services
 from langflow.utils.logger import configure
 
 
 def create_app():
     """Create the FastAPI app and include the router."""
+    configure()
 
     app = FastAPI()
@@ -31,9 +34,11 @@ def create_app():
     app.include_router(login.router)
     app.include_router(users.router)
     app.include_router(health.router)
+
     app.include_router(router)
 
-    app.on_event("startup")(create_db_and_tables)
+    app.on_event("startup")(initialize_services)
+    app.on_event("startup")(initialize_database)
     app.on_event("startup")(setup_llm_caching)
 
     return app
@@ -66,16 +71,20 @@ def get_static_files_dir():
     return frontend_path / "frontend"
 
 
-def setup_app(static_files_dir: Optional[Path] = None) -> FastAPI:
+def setup_app(
+    static_files_dir: Optional[Path] = None, backend_only: bool = False
+) -> FastAPI:
     """Setup the FastAPI app."""
     # get the directory of the current file
     if not static_files_dir:
         static_files_dir = get_static_files_dir()
-    if not static_files_dir or not static_files_dir.exists():
-        raise RuntimeError(f"Static files directory {static_files_dir} does not exist.")
+    if not backend_only and (not static_files_dir or not static_files_dir.exists()):
+        raise RuntimeError(
+            f"Static files directory {static_files_dir} does not exist."
+        )
     app = create_app()
-    setup_static_files(app, static_files_dir)
+    if not backend_only and static_files_dir is not None:
+        setup_static_files(app, static_files_dir)
 
     return app
diff --git a/src/backend/langflow/routers/login.py b/src/backend/langflow/routers/login.py
index 47839f6f5a..1b889cdadd 100644
--- a/src/backend/langflow/routers/login.py
+++ b/src/backend/langflow/routers/login.py
@@ -10,7 +10,8 @@
 )
 from sqlalchemy.orm import Session
 
-from langflow.database.base import get_session
+from langflow.services.utils import get_session
+
 
 router = APIRouter()
@@ -18,7 +19,8 @@
 def create_user_token(user: str) -> dict:
     access_token_expires = timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES)
     access_token = create_access_token(
-        data={"sub": user.username}, expires_delta=access_token_expires  # type: ignore
+        # type: ignore
+        data={"sub": user.username}, expires_delta=access_token_expires
     )
     return {"access_token": access_token, "token_type": "bearer"}
diff --git a/src/backend/langflow/routers/users.py b/src/backend/langflow/routers/users.py
index f34199d5dc..ee2f6a93c8 100644
--- a/src/backend/langflow/routers/users.py
+++ b/src/backend/langflow/routers/users.py
@@ -3,7 +3,7 @@
 from sqlalchemy.exc import IntegrityError
 from fastapi import APIRouter, Depends, HTTPException
 
-from langflow.database.base import get_session
+from langflow.services.utils import get_session
 from langflow.auth.auth import get_current_active_user
 from langflow.database.models.user import UserAddModel, UserListModel, User
diff --git a/src/backend/langflow/services/__init__.py b/src/backend/langflow/services/__init__.py
new file mode 100644
index 0000000000..8ac74b5b91
--- /dev/null
+++ b/src/backend/langflow/services/__init__.py
@@ -0,0 +1,4 @@
+from .manager import service_manager
+from .schema import ServiceType
+
+__all__ = ["service_manager", "ServiceType"]
diff --git a/src/backend/langflow/services/base.py b/src/backend/langflow/services/base.py
new file mode 100644
index 0000000000..6bca6c4e29
--- /dev/null
+++ b/src/backend/langflow/services/base.py
@@ -0,0 +1,2 @@
+class Service:
+    name: str
diff --git a/src/backend/langflow/services/cache/__init__.py b/src/backend/langflow/services/cache/__init__.py
new file mode 100644
index 0000000000..79e1438077
--- /dev/null
+++ b/src/backend/langflow/services/cache/__init__.py
@@ -0,0 +1,11 @@
+from . import factory, manager
+from langflow.services.cache.manager import cache_manager
+from langflow.services.cache.flow import InMemoryCache
+
+
+__all__ = [
+    "cache_manager",
+    "factory",
+    "manager",
+    "InMemoryCache",
+]
diff --git a/src/backend/langflow/cache/base.py b/src/backend/langflow/services/cache/base.py
similarity index 100%
rename from src/backend/langflow/cache/base.py
rename to src/backend/langflow/services/cache/base.py
diff --git a/src/backend/langflow/services/cache/factory.py b/src/backend/langflow/services/cache/factory.py
new file mode 100644
index 0000000000..77f8d58d14
--- /dev/null
+++ b/src/backend/langflow/services/cache/factory.py
@@ -0,0 +1,11 @@
+from langflow.services.cache.manager import CacheManager
+from langflow.services.factory import ServiceFactory
+
+
+class CacheManagerFactory(ServiceFactory):
+    def __init__(self):
+        super().__init__(CacheManager)
+
+    def create(self, settings_service):
+        # Here you would have logic to create and configure a CacheManager
+        return CacheManager()
diff --git a/src/backend/langflow/cache/flow.py b/src/backend/langflow/services/cache/flow.py
similarity index 98%
rename from src/backend/langflow/cache/flow.py
rename to src/backend/langflow/services/cache/flow.py
index 6d8fee9779..0c10c51e14 100644
--- a/src/backend/langflow/cache/flow.py
+++ b/src/backend/langflow/services/cache/flow.py
@@ -2,7 +2,7 @@
 import time
 from collections import OrderedDict
 
-from langflow.cache.base import BaseCache
+from langflow.services.cache.base import BaseCache
 
 
 class InMemoryCache(BaseCache):
diff --git a/src/backend/langflow/cache/manager.py b/src/backend/langflow/services/cache/manager.py
similarity index 97%
rename from src/backend/langflow/cache/manager.py
rename to src/backend/langflow/services/cache/manager.py
index 13b2810080..ce9a338efd 100644
--- a/src/backend/langflow/cache/manager.py
+++ b/src/backend/langflow/services/cache/manager.py
@@ -1,5 +1,6 @@
 from contextlib import contextmanager
 from typing import Any, Awaitable, Callable, List, Optional
+from langflow.services.base import Service
 
 import pandas as pd
 from PIL import Image
@@ -49,9 +50,11 @@ async def notify(self):
             await observer()
 
 
-class CacheManager(Subject):
+class CacheManager(Subject, Service):
     """Manages cache for different clients and notifies observers on changes."""
 
+    name = "cache_manager"
+
     def __init__(self):
         super().__init__()
         self._cache = {}
diff --git a/src/backend/langflow/cache/utils.py b/src/backend/langflow/services/cache/utils.py
similarity index 100%
rename from src/backend/langflow/cache/utils.py
rename to src/backend/langflow/services/cache/utils.py
diff --git a/src/backend/langflow/database/__init__.py b/src/backend/langflow/services/chat/__init__.py
similarity index 100%
rename from src/backend/langflow/database/__init__.py
rename to src/backend/langflow/services/chat/__init__.py
diff --git a/src/backend/langflow/chat/config.py b/src/backend/langflow/services/chat/config.py
similarity index 100%
rename from src/backend/langflow/chat/config.py
rename to src/backend/langflow/services/chat/config.py
diff --git a/src/backend/langflow/services/chat/factory.py b/src/backend/langflow/services/chat/factory.py
new file mode 100644
index 0000000000..03597ed112
--- /dev/null
+++ b/src/backend/langflow/services/chat/factory.py
@@ -0,0 +1,11 @@
+from langflow.services.chat.manager import ChatManager
+from langflow.services.factory import ServiceFactory
+
+
+class ChatManagerFactory(ServiceFactory):
+    def __init__(self):
+        super().__init__(ChatManager)
+
+    def create(self, settings_service):
+        # Here you would have logic to create and configure a ChatManager
+        return ChatManager()
diff --git a/src/backend/langflow/chat/manager.py b/src/backend/langflow/services/chat/manager.py
similarity index 94%
rename from src/backend/langflow/chat/manager.py
rename to src/backend/langflow/services/chat/manager.py
index 1e93174e2f..a49f48273e 100644
--- a/src/backend/langflow/chat/manager.py
+++ b/src/backend/langflow/services/chat/manager.py
@@ -1,10 +1,12 @@
 from collections import defaultdict
 from fastapi import WebSocket, status
 from langflow.api.v1.schemas import ChatMessage, ChatResponse, FileResponse
-from langflow.cache import cache_manager
-from langflow.cache.manager import Subject
-from langflow.chat.utils import process_graph
+from langflow.services.base import Service
+from langflow.services import service_manager
+from langflow.services.cache.manager import Subject
+from langflow.services.chat.utils import process_graph
 from langflow.interface.utils import pil_to_base64
+from langflow.services.schema import ServiceType
 from langflow.utils.logger import logger
@@ -12,7 +14,7 @@
 import json
 from typing import Any, Dict, List
 
-from langflow.cache.flow import InMemoryCache
+from langflow.services.cache.flow import InMemoryCache
 
 
 class ChatHistory(Subject):
@@ -42,11 +44,13 @@ def empty_history(self, client_id: str):
         self.history[client_id] = []
 
 
-class ChatManager:
+class ChatManager(Service):
+    name = "chat_manager"
+
     def __init__(self):
         self.active_connections: Dict[str, WebSocket] = {}
         self.chat_history = ChatHistory()
-        self.cache_manager = cache_manager
+        self.cache_manager = service_manager.get(ServiceType.CACHE_MANAGER)
         self.cache_manager.attach(self.update)
         self.in_memory_cache = InMemoryCache()
@@ -117,7 +121,7 @@ async def process_message(
         self, client_id: str, payload: Dict, langchain_object: Any
     ):
         # Process the graph data and chat message
-        chat_inputs = payload.pop("inputs", "")
+        chat_inputs = payload.pop("inputs", {})
         chat_inputs = ChatMessage(message=chat_inputs)
         self.chat_history.add_message(client_id, chat_inputs)
diff --git a/src/backend/langflow/chat/utils.py b/src/backend/langflow/services/chat/utils.py
similarity index 93%
rename from src/backend/langflow/chat/utils.py
rename to src/backend/langflow/services/chat/utils.py
index 7db65b8e38..17c976eb90 100644
--- a/src/backend/langflow/chat/utils.py
+++ b/src/backend/langflow/services/chat/utils.py
@@ -21,9 +21,9 @@ async def process_graph(
 
     # Generate result and thought
     try:
-        if not chat_inputs.message:
+        if chat_inputs.message is None:
             logger.debug("No message provided")
-            raise ValueError("No message provided")
+            chat_inputs.message = {}
 
         logger.debug("Generating result and thought")
         result, intermediate_steps = await get_result_and_steps(
diff --git a/src/backend/langflow/database/models/__init__.py b/src/backend/langflow/services/database/__init__.py
similarity index 100%
rename from src/backend/langflow/database/models/__init__.py
rename to src/backend/langflow/services/database/__init__.py
diff --git a/src/backend/langflow/services/database/factory.py b/src/backend/langflow/services/database/factory.py
new file mode 100644
index 0000000000..fecf245430
--- /dev/null
+++ b/src/backend/langflow/services/database/factory.py
@@ -0,0 +1,17 @@
+from typing import TYPE_CHECKING
+from langflow.services.database.manager import DatabaseManager
+from langflow.services.factory import ServiceFactory
+
+if TYPE_CHECKING:
+    from langflow.services.settings.manager import SettingsManager
+
+
+class DatabaseManagerFactory(ServiceFactory):
+    def __init__(self):
+        super().__init__(DatabaseManager)
+
+    def create(self, settings_service: "SettingsManager"):
+        # Here you would have logic to create and configure a DatabaseManager
+        if not settings_service.settings.DATABASE_URL:
+            raise ValueError("No database URL provided")
+        return DatabaseManager(settings_service.settings.DATABASE_URL)
diff --git a/src/backend/langflow/services/database/manager.py b/src/backend/langflow/services/database/manager.py
new file mode 100644
index 0000000000..92385a457d
--- /dev/null
+++ b/src/backend/langflow/services/database/manager.py
@@ -0,0 +1,67 @@
+from pathlib import Path
+from langflow.services.base import Service
+from sqlmodel import SQLModel, Session, create_engine
+from langflow.utils.logger import logger
+from alembic.config import Config
+from alembic import command
+from langflow.services.database import models  # noqa
+
+
+class DatabaseManager(Service):
+    name = "database_manager"
+
+    def __init__(self, database_url: str):
+        self.database_url = database_url
+        # This file is in langflow/services/database/manager.py,
+        # while alembic.ini lives in the langflow package root
+        langflow_dir = Path(__file__).parent.parent.parent
+        self.script_location = langflow_dir / "alembic"
+        self.alembic_cfg_path = langflow_dir / "alembic.ini"
+        self.engine = create_engine(database_url)
+
+    def __enter__(self):
+        self._session = Session(self.engine)
+        return self._session
+
+    def __exit__(self, exc_type, exc_value, traceback):
+        if exc_type is not None:  # If an exception has been raised
+            logger.error(
+                f"Session rollback because of exception: {exc_type.__name__} {exc_value}"
+            )
+            self._session.rollback()
+        else:
+            self._session.commit()
+        self._session.close()
+
+    def get_session(self):
+        with Session(self.engine) as session:
+            yield session
+
+    def run_migrations(self):
+        logger.info(
+            f"Running DB migrations in {self.script_location} on {self.database_url}"
+        )
+        alembic_cfg = Config()
+        alembic_cfg.set_main_option("script_location", str(self.script_location))
+        alembic_cfg.set_main_option("sqlalchemy.url", self.database_url)
+        command.upgrade(alembic_cfg, "head")
+
+    def create_db_and_tables(self):
+        logger.debug("Creating database and tables")
+        try:
+            SQLModel.metadata.create_all(self.engine)
+        except Exception as exc:
+            logger.error(f"Error creating database and tables: {exc}")
+            raise RuntimeError("Error creating database and tables") from exc
+
+        # Now check if the table "flow" exists; if not, something went wrong
+        # and we need to create the tables again.
+        from sqlalchemy import inspect
+
+        inspector = inspect(self.engine)
+        if "flow" not in inspector.get_table_names():
+            logger.error("Something went wrong creating the database and tables.")
+            logger.error("Please check your database settings.")
+            raise RuntimeError(
+                "Something went wrong creating the database and tables."
+            )
+        else:
+            logger.debug("Database and tables created successfully")
diff --git a/src/backend/langflow/services/database/models/__init__.py b/src/backend/langflow/services/database/models/__init__.py
new file mode 100644
index 0000000000..da47bc5fee
--- /dev/null
+++ b/src/backend/langflow/services/database/models/__init__.py
@@ -0,0 +1,4 @@
+from .flow import Flow
+
+
+__all__ = ["Flow"]
diff --git a/src/backend/langflow/database/models/base.py b/src/backend/langflow/services/database/models/base.py
similarity index 100%
rename from src/backend/langflow/database/models/base.py
rename to src/backend/langflow/services/database/models/base.py
diff --git a/src/backend/langflow/database/models/component.py b/src/backend/langflow/services/database/models/component.py
similarity index 92%
rename from src/backend/langflow/database/models/component.py
rename to src/backend/langflow/services/database/models/component.py
index bb2408cdbd..5c4e6c13a4 100644
--- a/src/backend/langflow/database/models/component.py
+++ b/src/backend/langflow/services/database/models/component.py
@@ -1,4 +1,4 @@
-from langflow.database.models.base import SQLModelSerializable, SQLModel
+from langflow.services.database.models.base import SQLModelSerializable, SQLModel
 from sqlmodel import Field
 from typing import Optional
 from datetime import datetime
diff --git a/src/backend/langflow/database/models/flow.py b/src/backend/langflow/services/database/models/flow.py
similarity index 72%
rename from src/backend/langflow/database/models/flow.py
rename to src/backend/langflow/services/database/models/flow.py
index f9e3aa249a..2bc83f9dcf 100644
--- a/src/backend/langflow/database/models/flow.py
+++ b/src/backend/langflow/services/database/models/flow.py
@@ -1,13 +1,12 @@
 # Path: src/backend/langflow/database/models/flow.py
-from langflow.database.models.base import SQLModelSerializable
+from langflow.services.database.models.base import SQLModelSerializable
 from pydantic import validator
-from sqlmodel import Field, Relationship, JSON, Column
+from sqlmodel import Field, JSON, Column
 from uuid import UUID, uuid4
 from typing import Dict, Optional
 
 # if TYPE_CHECKING:
-from langflow.database.models.flow_style import FlowStyle, FlowStyleRead
 
 
 class FlowBase(SQLModelSerializable):
@@ -35,11 +34,6 @@ def validate_json(v):
 class Flow(FlowBase, table=True):
     id: UUID = Field(default_factory=uuid4, primary_key=True, unique=True)
     data: Optional[Dict] = Field(default=None, sa_column=Column(JSON))
-    style: Optional["FlowStyle"] = Relationship(
-        back_populates="flow",
-        # use "uselist=False" to make it a one-to-one relationship
-        sa_relationship_kwargs={"uselist": False},
-    )
 
 
 class FlowCreate(FlowBase):
@@ -50,10 +44,6 @@ class FlowRead(FlowBase):
     id: UUID
 
 
-class FlowReadWithStyle(FlowRead):
-    style: Optional["FlowStyleRead"] = None
-
-
 class FlowUpdate(SQLModelSerializable):
     name: Optional[str] = None
     description: Optional[str] = None
diff --git a/src/backend/langflow/services/database/utils.py b/src/backend/langflow/services/database/utils.py
new file mode 100644
index 0000000000..20b2bbbb42
--- /dev/null
+++ b/src/backend/langflow/services/database/utils.py
@@ -0,0 +1,31 @@
+from typing import TYPE_CHECKING
+from langflow.utils.logger import logger
+from contextlib import contextmanager
+
+from sqlmodel import Session
+
+if TYPE_CHECKING:
+    from langflow.services.database.manager import DatabaseManager
+
+
+def initialize_database():
+    logger.debug("Initializing database")
+    from langflow.services import service_manager, ServiceType
+
+    database_manager = service_manager.get(ServiceType.DATABASE_MANAGER)
+    database_manager.run_migrations()
+    database_manager.create_db_and_tables()
+    logger.debug("Database initialized")
+
+
+@contextmanager
+def session_getter(db_manager: "DatabaseManager"):
+    try:
+        session = Session(db_manager.engine)
+        yield session
+    except Exception as e:
+        logger.error(f"Session rollback because of exception: {e}")
+        session.rollback()
+        raise
+    finally:
+        session.close()
diff --git a/src/backend/langflow/services/factory.py b/src/backend/langflow/services/factory.py
new file mode 100644
index 0000000000..c37f4e9c2c
--- /dev/null
+++ b/src/backend/langflow/services/factory.py
@@ -0,0 +1,6 @@
+class ServiceFactory:
+    def __init__(self, service_class):
+        self.service_class = service_class
+
+    def create(self, *args, **kwargs):
+        raise NotImplementedError
diff --git a/src/backend/langflow/services/manager.py b/src/backend/langflow/services/manager.py
new file mode 100644
index 0000000000..1606b3a828
--- /dev/null
+++ b/src/backend/langflow/services/manager.py
@@ -0,0 +1,87 @@
+from langflow.services.schema import ServiceType
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from langflow.services.factory import ServiceFactory
+
+
+class ServiceManager:
+    """
+    Manages the creation of different services.
+    """
+
+    def __init__(self):
+        self.services = {}
+        self.factories = {}
+
+    def register_factory(self, service_factory: "ServiceFactory"):
+        """
+        Registers a new factory.
+        """
+        self.factories[service_factory.service_class.name] = service_factory
+
+    def get(self, service_name: ServiceType):
+        """
+        Get (or create) a service by its name.
+        """
+        if service_name not in self.services:
+            self._create_service(service_name)
+
+        return self.services[service_name]
+
+    def _create_service(self, service_name: ServiceType):
+        """
+        Create a new service given its name.
+        """
+        self._validate_service_creation(service_name)
+
+        if service_name == ServiceType.SETTINGS_MANAGER:
+            self.services[service_name] = self.factories[service_name].create()
+        else:
+            settings_service = self.get(ServiceType.SETTINGS_MANAGER)
+            self.services[service_name] = self.factories[service_name].create(
+                settings_service
+            )
+
+    def _validate_service_creation(self, service_name: ServiceType):
+        """
+        Validate whether the service can be created.
+        """
+        if service_name not in self.factories:
+            raise ValueError(
+                f"No factory registered for the service class '{service_name.name}'"
+            )
+
+        if (
+            ServiceType.SETTINGS_MANAGER not in self.factories
+            and service_name != ServiceType.SETTINGS_MANAGER
+        ):
+            raise ValueError(
+                f"Cannot create service '{service_name.name}' before the settings service"
+            )
+
+    def update(self, service_name: ServiceType):
+        """
+        Update a service by its name.
+        """
+        if service_name in self.services:
+            self.services.pop(service_name, None)
+            self.get(service_name)
+
+
+service_manager = ServiceManager()
+
+
+def initialize_services():
+    """
+    Initialize all the services needed.
+ """ + from langflow.services.database import factory as database_factory + from langflow.services.cache import factory as cache_factory + from langflow.services.chat import factory as chat_factory + from langflow.services.settings import factory as settings_factory + + service_manager.register_factory(settings_factory.SettingsManagerFactory()) + service_manager.register_factory(database_factory.DatabaseManagerFactory()) + service_manager.register_factory(cache_factory.CacheManagerFactory()) + service_manager.register_factory(chat_factory.ChatManagerFactory()) diff --git a/src/backend/langflow/services/schema.py b/src/backend/langflow/services/schema.py new file mode 100644 index 0000000000..695763afce --- /dev/null +++ b/src/backend/langflow/services/schema.py @@ -0,0 +1,13 @@ +from enum import Enum + + +class ServiceType(str, Enum): + """ + Enum for the different types of services that can be + registered with the service manager. + """ + + CACHE_MANAGER = "cache_manager" + SETTINGS_MANAGER = "settings_manager" + DATABASE_MANAGER = "database_manager" + CHAT_MANAGER = "chat_manager" diff --git a/src/backend/langflow/services/settings/__init__.py b/src/backend/langflow/services/settings/__init__.py new file mode 100644 index 0000000000..2191bf2cc2 --- /dev/null +++ b/src/backend/langflow/services/settings/__init__.py @@ -0,0 +1,3 @@ +from . import factory, manager + +__all__ = ["factory", "manager"] diff --git a/src/backend/langflow/services/settings/base.py b/src/backend/langflow/services/settings/base.py new file mode 100644 index 0000000000..1eb2793b30 --- /dev/null +++ b/src/backend/langflow/services/settings/base.py @@ -0,0 +1,168 @@ +import contextlib +import json +import os +from typing import Optional, List +from pathlib import Path + +import yaml +from pydantic import BaseSettings, root_validator, validator +from langflow.utils.logger import logger + +BASE_COMPONENTS_PATH = str(Path(__file__).parent / "components") + + +class Settings(BaseSettings): + CHAINS: dict = {} + AGENTS: dict = {} + PROMPTS: dict = {} + LLMS: dict = {} + TOOLS: dict = {} + MEMORIES: dict = {} + EMBEDDINGS: dict = {} + VECTORSTORES: dict = {} + DOCUMENTLOADERS: dict = {} + WRAPPERS: dict = {} + RETRIEVERS: dict = {} + TOOLKITS: dict = {} + TEXTSPLITTERS: dict = {} + UTILITIES: dict = {} + OUTPUT_PARSERS: dict = {} + CUSTOM_COMPONENTS: dict = {} + + DEV: bool = False + DATABASE_URL: Optional[str] = None + CACHE: str = "InMemoryCache" + REMOVE_API_KEYS: bool = False + COMPONENTS_PATH: List[str] = [] + + @validator("DATABASE_URL", pre=True) + def set_database_url(cls, value): + if not value: + logger.debug( + "No database_url provided, trying LANGFLOW_DATABASE_URL env variable" + ) + if langflow_database_url := os.getenv("LANGFLOW_DATABASE_URL"): + value = langflow_database_url + logger.debug("Using LANGFLOW_DATABASE_URL env variable.") + else: + logger.debug("No DATABASE_URL env variable, using sqlite database") + value = "sqlite:///./langflow.db" + + return value + + @validator("COMPONENTS_PATH", pre=True) + def set_components_path(cls, value): + if os.getenv("LANGFLOW_COMPONENTS_PATH"): + logger.debug("Adding LANGFLOW_COMPONENTS_PATH to components_path") + langflow_component_path = os.getenv("LANGFLOW_COMPONENTS_PATH") + if ( + Path(langflow_component_path).exists() + and langflow_component_path not in value + ): + if isinstance(langflow_component_path, list): + for path in langflow_component_path: + if path not in value: + value.append(path) + logger.debug( + f"Extending {langflow_component_path} to 
components_path" + ) + elif langflow_component_path not in value: + value.append(langflow_component_path) + logger.debug( + f"Appending {langflow_component_path} to components_path" + ) + + if not value: + value = [BASE_COMPONENTS_PATH] + logger.debug("Setting default components path to components_path") + elif BASE_COMPONENTS_PATH not in value: + value.append(BASE_COMPONENTS_PATH) + logger.debug("Adding default components path to components_path") + + logger.debug(f"Components path: {value}") + return value + + class Config: + validate_assignment = True + extra = "ignore" + env_prefix = "LANGFLOW_" + + @root_validator(allow_reuse=True) + def validate_lists(cls, values): + for key, value in values.items(): + if key != "dev" and not value: + values[key] = [] + return values + + def update_from_yaml(self, file_path: str, dev: bool = False): + new_settings = load_settings_from_yaml(file_path) + self.CHAINS = new_settings.CHAINS or {} + self.AGENTS = new_settings.AGENTS or {} + self.PROMPTS = new_settings.PROMPTS or {} + self.LLMS = new_settings.LLMS or {} + self.TOOLS = new_settings.TOOLS or {} + self.MEMORIES = new_settings.MEMORIES or {} + self.WRAPPERS = new_settings.WRAPPERS or {} + self.TOOLKITS = new_settings.TOOLKITS or {} + self.TEXTSPLITTERS = new_settings.TEXTSPLITTERS or {} + self.UTILITIES = new_settings.UTILITIES or {} + self.EMBEDDINGS = new_settings.EMBEDDINGS or {} + self.VECTORSTORES = new_settings.VECTORSTORES or {} + self.DOCUMENTLOADERS = new_settings.DOCUMENTLOADERS or {} + self.RETRIEVERS = new_settings.RETRIEVERS or {} + self.OUTPUT_PARSERS = new_settings.OUTPUT_PARSERS or {} + self.CUSTOM_COMPONENTS = new_settings.CUSTOM_COMPONENTS or {} + self.COMPONENTS_PATH = new_settings.COMPONENTS_PATH or [] + self.DEV = dev + + def update_settings(self, **kwargs): + logger.debug("Updating settings") + for key, value in kwargs.items(): + # value may contain sensitive information, so we don't want to log it + if not hasattr(self, key): + logger.debug(f"Key {key} not found in settings") + continue + logger.debug(f"Updating {key}") + if isinstance(getattr(self, key), list): + # value might be a '[something]' string + with contextlib.suppress(json.decoder.JSONDecodeError): + value = json.loads(str(value)) + if isinstance(value, list): + for item in value: + if item not in getattr(self, key): + getattr(self, key).append(item) + logger.debug(f"Extended {key}") + else: + getattr(self, key).append(value) + logger.debug(f"Appended {key}") + + else: + setattr(self, key, value) + logger.debug(f"Updated {key}") + logger.debug(f"{key}: {getattr(self, key)}") + + +def save_settings_to_yaml(settings: Settings, file_path: str): + with open(file_path, "w") as f: + settings_dict = settings.dict() + yaml.dump(settings_dict, f) + + +def load_settings_from_yaml(file_path: str) -> Settings: + # Check if a string is a valid path or a file name + if "/" not in file_path: + # Get current path + current_path = os.path.dirname(os.path.abspath(__file__)) + + file_path = os.path.join(current_path, file_path) + + with open(file_path, "r") as f: + settings_dict = yaml.safe_load(f) + settings_dict = {k.upper(): v for k, v in settings_dict.items()} + + for key in settings_dict: + if key not in Settings.__fields__.keys(): + raise KeyError(f"Key {key} not found in settings") + logger.debug(f"Loading {len(settings_dict[key])} {key} from {file_path}") + + return Settings(**settings_dict) diff --git a/src/backend/langflow/services/settings/factory.py b/src/backend/langflow/services/settings/factory.py new file mode 
100644 index 0000000000..ab22e22b80 --- /dev/null +++ b/src/backend/langflow/services/settings/factory.py @@ -0,0 +1,15 @@ +from pathlib import Path +from langflow.services.settings.manager import SettingsManager +from langflow.services.factory import ServiceFactory + + +class SettingsManagerFactory(ServiceFactory): + def __init__(self): + super().__init__(SettingsManager) + + def create(self): + # Here you would have logic to create and configure a SettingsManager + langflow_dir = Path(__file__).parent.parent.parent + return SettingsManager.load_settings_from_yaml( + str(langflow_dir / "config.yaml") + ) diff --git a/src/backend/langflow/services/settings/manager.py b/src/backend/langflow/services/settings/manager.py new file mode 100644 index 0000000000..a357c48048 --- /dev/null +++ b/src/backend/langflow/services/settings/manager.py @@ -0,0 +1,36 @@ +from langflow.services.base import Service +from langflow.services.settings.base import Settings +from langflow.utils.logger import logger +import os +import yaml + + +class SettingsManager(Service): + name = "settings_manager" + + def __init__(self, settings: Settings): + super().__init__() + self.settings = settings + + @classmethod + def load_settings_from_yaml(cls, file_path: str) -> "SettingsManager": + # Check if a string is a valid path or a file name + if "/" not in file_path: + # Get current path + current_path = os.path.dirname(os.path.abspath(__file__)) + + file_path = os.path.join(current_path, file_path) + + with open(file_path, "r") as f: + settings_dict = yaml.safe_load(f) + settings_dict = {k.upper(): v for k, v in settings_dict.items()} + + for key in settings_dict: + if key not in Settings.__fields__.keys(): + raise KeyError(f"Key {key} not found in settings") + logger.debug( + f"Loading {len(settings_dict[key])} {key} from {file_path}" + ) + + settings = Settings(**settings_dict) + return cls(settings) diff --git a/src/backend/langflow/services/settings/settings.py b/src/backend/langflow/services/settings/settings.py new file mode 100644 index 0000000000..439b3a1e44 --- /dev/null +++ b/src/backend/langflow/services/settings/settings.py @@ -0,0 +1,171 @@ +import contextlib +import json +import os +from typing import Optional, List +from pathlib import Path + +import yaml +from pydantic import BaseSettings, root_validator, validator +from langflow.utils.logger import logger + +BASE_COMPONENTS_PATH = str(Path(__file__).parent / "components") + + +class Settings(BaseSettings): + CHAINS: dict = {} + AGENTS: dict = {} + PROMPTS: dict = {} + LLMS: dict = {} + TOOLS: dict = {} + MEMORIES: dict = {} + EMBEDDINGS: dict = {} + VECTORSTORES: dict = {} + DOCUMENTLOADERS: dict = {} + WRAPPERS: dict = {} + RETRIEVERS: dict = {} + TOOLKITS: dict = {} + TEXTSPLITTERS: dict = {} + UTILITIES: dict = {} + OUTPUT_PARSERS: dict = {} + CUSTOM_COMPONENTS: dict = {} + + DEV: bool = False + DATABASE_URL: Optional[str] = None + CACHE: str = "InMemoryCache" + REMOVE_API_KEYS: bool = False + COMPONENTS_PATH: List[str] = [] + + @validator("DATABASE_URL", pre=True) + def set_database_url(cls, value): + if not value: + logger.debug( + "No database_url provided, trying LANGFLOW_DATABASE_URL env variable" + ) + if langflow_database_url := os.getenv("LANGFLOW_DATABASE_URL"): + value = langflow_database_url + logger.debug("Using LANGFLOW_DATABASE_URL env variable.") + else: + logger.debug("No DATABASE_URL env variable, using sqlite database") + value = "sqlite:///./langflow.db" + + return value + + @validator("COMPONENTS_PATH", pre=True) + def 
set_components_path(cls, value):
+        if os.getenv("LANGFLOW_COMPONENTS_PATH"):
+            logger.debug("Adding LANGFLOW_COMPONENTS_PATH to components_path")
+            langflow_component_path = os.getenv("LANGFLOW_COMPONENTS_PATH")
+            if (
+                Path(langflow_component_path).exists()
+                and langflow_component_path not in value
+            ):
+                if isinstance(langflow_component_path, list):
+                    for path in langflow_component_path:
+                        if path not in value:
+                            value.append(path)
+                    logger.debug(
+                        f"Extending {langflow_component_path} to components_path"
+                    )
+                elif langflow_component_path not in value:
+                    value.append(langflow_component_path)
+                    logger.debug(
+                        f"Appending {langflow_component_path} to components_path"
+                    )
+
+        if not value:
+            value = [BASE_COMPONENTS_PATH]
+            logger.debug("Setting default components path to components_path")
+        elif BASE_COMPONENTS_PATH not in value:
+            value.append(BASE_COMPONENTS_PATH)
+            logger.debug("Adding default components path to components_path")
+
+        logger.debug(f"Components path: {value}")
+        return value
+
+    class Config:
+        validate_assignment = True
+        extra = "ignore"
+        env_prefix = "LANGFLOW_"
+
+    @root_validator(allow_reuse=True)
+    def validate_lists(cls, values):
+        for key, value in values.items():
+            if key != "dev" and not value:
+                values[key] = []
+        return values
+
+    def update_from_yaml(self, file_path: str, dev: bool = False):
+        new_settings = load_settings_from_yaml(file_path)
+        self.CHAINS = new_settings.CHAINS or {}
+        self.AGENTS = new_settings.AGENTS or {}
+        self.PROMPTS = new_settings.PROMPTS or {}
+        self.LLMS = new_settings.LLMS or {}
+        self.TOOLS = new_settings.TOOLS or {}
+        self.MEMORIES = new_settings.MEMORIES or {}
+        self.WRAPPERS = new_settings.WRAPPERS or {}
+        self.TOOLKITS = new_settings.TOOLKITS or {}
+        self.TEXTSPLITTERS = new_settings.TEXTSPLITTERS or {}
+        self.UTILITIES = new_settings.UTILITIES or {}
+        self.EMBEDDINGS = new_settings.EMBEDDINGS or {}
+        self.VECTORSTORES = new_settings.VECTORSTORES or {}
+        self.DOCUMENTLOADERS = new_settings.DOCUMENTLOADERS or {}
+        self.RETRIEVERS = new_settings.RETRIEVERS or {}
+        self.OUTPUT_PARSERS = new_settings.OUTPUT_PARSERS or {}
+        self.CUSTOM_COMPONENTS = new_settings.CUSTOM_COMPONENTS or {}
+        self.COMPONENTS_PATH = new_settings.COMPONENTS_PATH or []
+        self.DEV = dev
+
+    def update_settings(self, **kwargs):
+        logger.debug("Updating settings")
+        for key, value in kwargs.items():
+            # value may contain sensitive information, so we don't want to log it
+            if not hasattr(self, key):
+                logger.debug(f"Key {key} not found in settings")
+                continue
+            logger.debug(f"Updating {key}")
+            if isinstance(getattr(self, key), list):
+                # value might be a '[something]' string
+                with contextlib.suppress(json.decoder.JSONDecodeError):
+                    value = json.loads(str(value))
+                if isinstance(value, list):
+                    for item in value:
+                        if item not in getattr(self, key):
+                            getattr(self, key).append(item)
+                    logger.debug(f"Extended {key}")
+                else:
+                    getattr(self, key).append(value)
+                    logger.debug(f"Appended {key}")
+
+            else:
+                setattr(self, key, value)
+                logger.debug(f"Updated {key}")
+            logger.debug(f"{key}: {getattr(self, key)}")
+
+
+def save_settings_to_yaml(settings: Settings, file_path: str):
+    with open(file_path, "w") as f:
+        settings_dict = settings.dict()
+        yaml.dump(settings_dict, f)
+
+
+def load_settings_from_yaml(file_path: str) -> Settings:
+    # Check if a string is a valid path or a file name
+    if "/" not in file_path:
+        # Get current path
+        current_path = os.path.dirname(os.path.abspath(__file__))
+
+        file_path = os.path.join(current_path, file_path)
+
+    with open(file_path, "r") as f:
+        settings_dict = yaml.safe_load(f)
+        settings_dict = {k.upper(): v for k, v in settings_dict.items()}
+
+        for key in settings_dict:
+            if key not in Settings.__fields__.keys():
+                raise KeyError(f"Key {key} not found in settings")
+            logger.debug(f"Loading {len(settings_dict[key])} {key} from {file_path}")
+
+    return Settings(**settings_dict)
+
+
+settings = load_settings_from_yaml("config.yaml")
diff --git a/src/backend/langflow/services/utils.py b/src/backend/langflow/services/utils.py
new file mode 100644
index 0000000000..049e82c0fa
--- /dev/null
+++ b/src/backend/langflow/services/utils.py
@@ -0,0 +1,18 @@
+from langflow.services import ServiceType, service_manager
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from langflow.services.settings.manager import SettingsManager
+
+
+def get_settings_manager() -> "SettingsManager":
+    return service_manager.get(ServiceType.SETTINGS_MANAGER)
+
+
+def get_db_manager():
+    return service_manager.get(ServiceType.DATABASE_MANAGER)
+
+
+def get_session():
+    db_manager = service_manager.get(ServiceType.DATABASE_MANAGER)
+    yield from db_manager.get_session()
diff --git a/src/backend/langflow/settings.py b/src/backend/langflow/settings.py
deleted file mode 100644
index 08400a8116..0000000000
--- a/src/backend/langflow/settings.py
+++ /dev/null
@@ -1,130 +0,0 @@
-import os
-from typing import Optional, List
-from pathlib import Path
-
-import yaml
-from pydantic import BaseSettings, root_validator
-from langflow.utils.logger import logger
-
-BASE_COMPONENTS_PATH = Path(__file__).parent / "components"
-
-
-class Settings(BaseSettings):
-    chains: dict = {}
-    agents: dict = {}
-    prompts: dict = {}
-    llms: dict = {}
-    tools: dict = {}
-    memories: dict = {}
-    embeddings: dict = {}
-    vectorstores: dict = {}
-    documentloaders: dict = {}
-    wrappers: dict = {}
-    retrievers: dict = {}
-    toolkits: dict = {}
-    textsplitters: dict = {}
-    utilities: dict = {}
-    output_parsers: dict = {}
-    custom_components: dict = {}
-
-    dev: bool = False
-    database_url: Optional[str] = None
-    cache: str = "InMemoryCache"
-    remove_api_keys: bool = False
-    components_path: List[Path]
-
-    @root_validator(pre=True)
-    def set_env_variables(cls, values):
-        if "database_url" not in values:
-            logger.debug(
-                "No database_url provided, trying LANGFLOW_DATABASE_URL env variable"
-            )
-            if langflow_database_url := os.getenv("LANGFLOW_DATABASE_URL"):
-                values["database_url"] = langflow_database_url
-            else:
-                logger.debug("No DATABASE_URL env variable, using sqlite database")
-                values["database_url"] = "sqlite:///./langflow.db"
-
-        if not values.get("components_path"):
-            values["components_path"] = [BASE_COMPONENTS_PATH]
-            logger.debug("No components_path provided, using default components path")
-        elif BASE_COMPONENTS_PATH not in values["components_path"]:
-            values["components_path"].append(BASE_COMPONENTS_PATH)
-            logger.debug("Adding default components path to components_path")
-
-        if os.getenv("LANGFLOW_COMPONENTS_PATH"):
-            logger.debug("Adding LANGFLOW_COMPONENTS_PATH to components_path")
-            langflow_component_path = Path(os.getenv("LANGFLOW_COMPONENTS_PATH"))
-            if (
-                langflow_component_path.exists()
-                and langflow_component_path not in values["components_path"]
-            ):
-                values["components_path"].append(langflow_component_path)
-                logger.debug(f"Adding {langflow_component_path} to components_path")
-        return values
-
-    class Config:
-        validate_assignment = True
-        extra = "ignore"
-
-    @root_validator(allow_reuse=True)
-    def validate_lists(cls, values):
-        for key, value in values.items():
-            if key != "dev" and not value:
-                values[key] = []
-        return values
-
-    def update_from_yaml(self, file_path: str, dev: bool = False):
-        new_settings = load_settings_from_yaml(file_path)
-        self.chains = new_settings.chains or {}
-        self.agents = new_settings.agents or {}
-        self.prompts = new_settings.prompts or {}
-        self.llms = new_settings.llms or {}
-        self.tools = new_settings.tools or {}
-        self.memories = new_settings.memories or {}
-        self.wrappers = new_settings.wrappers or {}
-        self.toolkits = new_settings.toolkits or {}
-        self.textsplitters = new_settings.textsplitters or {}
-        self.utilities = new_settings.utilities or {}
-        self.embeddings = new_settings.embeddings or {}
-        self.vectorstores = new_settings.vectorstores or {}
-        self.documentloaders = new_settings.documentloaders or {}
-        self.retrievers = new_settings.retrievers or {}
-        self.output_parsers = new_settings.output_parsers or {}
-        self.custom_components = new_settings.custom_components or {}
-        self.components_path = new_settings.components_path or []
-        self.dev = dev
-
-    def update_settings(self, **kwargs):
-        for key, value in kwargs.items():
-            if hasattr(self, key):
-                if isinstance(getattr(self, key), list):
-                    if isinstance(value, list):
-                        getattr(self, key).extend(value)
-                    else:
-                        getattr(self, key).append(value)
-                else:
-                    setattr(self, key, value)
-
-
-def save_settings_to_yaml(settings: Settings, file_path: str):
-    with open(file_path, "w") as f:
-        settings_dict = settings.dict()
-        yaml.dump(settings_dict, f)
-
-
-def load_settings_from_yaml(file_path: str) -> Settings:
-    # Check if a string is a valid path or a file name
-    if "/" not in file_path:
-        # Get current path
-        current_path = os.path.dirname(os.path.abspath(__file__))
-
-        file_path = os.path.join(current_path, file_path)
-
-    with open(file_path, "r") as f:
-        settings_dict = yaml.safe_load(f)
-
-    return Settings(**settings_dict)
-
-
-settings = load_settings_from_yaml("config.yaml")
diff --git a/src/backend/langflow/template/frontend_node/utilities.py b/src/backend/langflow/template/frontend_node/utilities.py
index 615d7d12ff..df993e3772 100644
--- a/src/backend/langflow/template/frontend_node/utilities.py
+++ b/src/backend/langflow/template/frontend_node/utilities.py
@@ -12,8 +12,11 @@ def format_field(field: TemplateField, name: Optional[str] = None) -> None:
         FrontendNode.format_field(field, name)
         # field.field_type could be "Literal['news', 'search', 'places', 'images']
         # we need to convert it to a list
+        # It seems it could also be like "typing_extensions.['news', 'search', 'places', 'images']"
         if "Literal" in field.field_type:
-            field.options = ast.literal_eval(field.field_type.replace("Literal", ""))
+            field_type = field.field_type.replace("typing_extensions.", "")
+            field_type = field_type.replace("Literal", "")
+            field.options = ast.literal_eval(field_type)
             field.is_list = True
             field.field_type = "str"
diff --git a/src/backend/langflow/template/frontend_node/vectorstores.py b/src/backend/langflow/template/frontend_node/vectorstores.py
index 53a840b802..23c2934377 100644
--- a/src/backend/langflow/template/frontend_node/vectorstores.py
+++ b/src/backend/langflow/template/frontend_node/vectorstores.py
@@ -4,6 +4,52 @@
 from langflow.template.frontend_node.base import FrontendNode
 
+BASIC_FIELDS = [
+    "work_dir",
+    "collection_name",
+    "api_key",
+    "location",
+    "persist_directory",
+    "persist",
+    "weaviate_url",
+    "index_name",
+    "namespace",
+    "folder_path",
+    "table_name",
+    "query_name",
+    "supabase_url",
+    "supabase_service_key",
+    "mongodb_atlas_cluster_uri",
+    "collection_name",
+    "db_name",
+]
+ADVANCED_FIELDS = [
+    "n_dim",
+    "key",
+    "prefix",
+    "distance_func",
+    "content_payload_key",
+    "metadata_payload_key",
+    "timeout",
+    "host",
+    "path",
+    "url",
+    "port",
+    "https",
+    "prefer_grpc",
+    "grpc_port",
+    "pinecone_api_key",
+    "pinecone_env",
+    "client_kwargs",
+    "search_kwargs",
+    "chroma_server_host",
+    "chroma_server_http_port",
+    "chroma_server_ssl_enabled",
+    "chroma_server_grpc_port",
+    "chroma_server_cors_allow_origins",
+]
+
+
 class VectorStoreFrontendNode(FrontendNode):
     def add_extra_fields(self) -> None:
         extra_fields: List[TemplateField] = []
@@ -45,16 +91,62 @@ def add_extra_fields(self) -> None:
         elif self.template.type_name == "Chroma":
             # New bool field for persist parameter
-            extra_field = TemplateField(
-                name="persist",
-                field_type="bool",
-                required=False,
-                show=True,
-                advanced=False,
-                value=False,
-                display_name="Persist",
-            )
-            extra_fields.append(extra_field)
+            chroma_fields = [
+                TemplateField(
+                    name="persist",
+                    field_type="bool",
+                    required=False,
+                    show=True,
+                    advanced=False,
+                    value=False,
+                    display_name="Persist",
+                ),
+                # chroma_server_grpc_port: str | None = None,
+                TemplateField(
+                    name="chroma_server_host",
+                    field_type="str",
+                    required=False,
+                    show=True,
+                    advanced=True,
+                    display_name="Chroma Server Host",
+                ),
+                TemplateField(
+                    name="chroma_server_http_port",
+                    field_type="str",
+                    required=False,
+                    show=True,
+                    advanced=True,
+                    display_name="Chroma Server HTTP Port",
+                ),
+                TemplateField(
+                    name="chroma_server_ssl_enabled",
+                    field_type="bool",
+                    required=False,
+                    show=True,
+                    advanced=True,
+                    value=False,
+                    display_name="Chroma Server SSL Enabled",
+                ),
+                TemplateField(
+                    name="chroma_server_grpc_port",
+                    field_type="str",
+                    required=False,
+                    show=True,
+                    advanced=True,
+                    display_name="Chroma Server GRPC Port",
+                ),
+                TemplateField(
+                    name="chroma_server_cors_allow_origins",
+                    field_type="str",
+                    required=False,
+                    is_list=True,
+                    show=True,
+                    advanced=True,
+                    display_name="Chroma Server CORS Allow Origins",
+                ),
+            ]
+
+            extra_fields.extend(chroma_fields)
         elif self.template.type_name == "Pinecone":
             # add pinecone_api_key and pinecone_env
             extra_field = TemplateField(
@@ -208,45 +300,6 @@ def add_extra_base_classes(self) -> None:
     def format_field(field: TemplateField, name: Optional[str] = None) -> None:
         FrontendNode.format_field(field, name)
         # Define common field attributes
-        basic_fields = [
-            "work_dir",
-            "collection_name",
-            "api_key",
-            "location",
-            "persist_directory",
-            "persist",
-            "weaviate_url",
-            "index_name",
-            "namespace",
-            "folder_path",
-            "table_name",
-            "query_name",
-            "supabase_url",
-            "supabase_service_key",
-            "mongodb_atlas_cluster_uri",
-            "collection_name",
-            "db_name",
-        ]
-        advanced_fields = [
-            "n_dim",
-            "key",
-            "prefix",
-            "distance_func",
-            "content_payload_key",
-            "metadata_payload_key",
-            "timeout",
-            "host",
-            "path",
-            "url",
-            "port",
-            "https",
-            "prefer_grpc",
-            "grpc_port",
-            "pinecone_api_key",
-            "pinecone_env",
-            "client_kwargs",
-            "search_kwargs",
-        ]
 
         # Check and set field attributes
         if field.name == "texts":
@@ -269,7 +322,7 @@ def format_field(field: TemplateField, name: Optional[str] = None) -> None:
             field.display_name = "Embedding"
             field.field_type = "Embeddings"
 
-        elif field.name in basic_fields:
+        elif field.name in BASIC_FIELDS:
             field.show = True
             field.advanced = False
             if field.name == "api_key":
@@ -279,7 +332,7 @@ def format_field(field: TemplateField, name: Optional[str] = None) -> None:
             field.value = ":memory:"
             field.placeholder = ":memory:"
 
-        elif field.name in advanced_fields:
+        elif field.name in ADVANCED_FIELDS:
             field.show = True
             field.advanced = True
             if "key" in field.name:
diff --git a/src/backend/langflow/utils/lazy_load.py b/src/backend/langflow/utils/lazy_load.py
new file mode 100644
index 0000000000..df0130acc5
--- /dev/null
+++ b/src/backend/langflow/utils/lazy_load.py
@@ -0,0 +1,15 @@
+class LazyLoadDictBase:
+    def __init__(self):
+        self._all_types_dict = None
+
+    @property
+    def all_types_dict(self):
+        if self._all_types_dict is None:
+            self._all_types_dict = self._build_dict()
+        return self._all_types_dict
+
+    def _build_dict(self):
+        raise NotImplementedError
+
+    def get_type_dict(self):
+        raise NotImplementedError
diff --git a/src/frontend/.dockerignore b/src/frontend/.dockerignore
new file mode 100644
index 0000000000..ca57620074
--- /dev/null
+++ b/src/frontend/.dockerignore
@@ -0,0 +1,2 @@
+**/node_modules
+**/build
\ No newline at end of file
diff --git a/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx b/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx
index e91d65a3e0..72c2a8e4dc 100644
--- a/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx
+++ b/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx
@@ -87,7 +87,7 @@ export default function ParameterComponent({
   useEffect(() => {
     if (name === "openai_api_base") console.log(info);
     infoHtml.current = (
-

+
{info.split("\n").map((line, i) => (

{line} diff --git a/src/frontend/src/alerts/alertDropDown/components/singleAlertComponent/index.tsx b/src/frontend/src/alerts/alertDropDown/components/singleAlertComponent/index.tsx index 9a94db7d5f..ca15abc860 100644 --- a/src/frontend/src/alerts/alertDropDown/components/singleAlertComponent/index.tsx +++ b/src/frontend/src/alerts/alertDropDown/components/singleAlertComponent/index.tsx @@ -36,14 +36,14 @@ export default function SingleAlert({ />

-

+

{dropItem.title}

{dropItem.list ? (
    {dropItem.list.map((item, idx) => ( -
+
{item}
))}
diff --git a/src/frontend/src/alerts/notice/index.tsx b/src/frontend/src/alerts/notice/index.tsx
index 494d9d4dc4..1acd5c8984 100644
--- a/src/frontend/src/alerts/notice/index.tsx
+++ b/src/frontend/src/alerts/notice/index.tsx
@@ -47,7 +47,9 @@
/>
-

{title}

+

+ {title} +

{link !== "" ? (
{ return { ...old,
diff --git a/src/frontend/src/components/chatComponent/index.tsx b/src/frontend/src/components/chatComponent/index.tsx
index 3cbe5bbf1b..2b32dc0c46 100644
--- a/src/frontend/src/components/chatComponent/index.tsx
+++ b/src/frontend/src/components/chatComponent/index.tsx
@@ -62,8 +62,7 @@ export default function Chat({ flow }: ChatType) {
       tabsState &&
       tabsState[flow.id] &&
       tabsState[flow.id].formKeysData &&
-      tabsState[flow.id].formKeysData.input_keys &&
-      Object.keys(tabsState[flow.id].formKeysData.input_keys).length > 0
+      tabsState[flow.id].formKeysData.input_keys !== null
     ) {
       setCanOpen(true);
     } else {
diff --git a/src/frontend/src/components/codeTabsComponent/index.tsx b/src/frontend/src/components/codeTabsComponent/index.tsx
index bc9ca3a5a4..27653fbd71 100644
--- a/src/frontend/src/components/codeTabsComponent/index.tsx
+++ b/src/frontend/src/components/codeTabsComponent/index.tsx
@@ -132,21 +132,25 @@ export default function CodeTabsComponent({
        }}
      >

- - {tabs.map((tab, index) => ( - - {tab.name} - - ))} - + {tabs.length > 0 && tabs[0].name !== "" ? ( + + {tabs.map((tab, index) => ( + + {tab.name} + + ))} + + ) : ( +
+ )} {Number(activeTab) < 4 && ( -
+
-
+
{promptOpen ? template?.split("\n")?.map((line, index) => {
const regex = /{([^}]+)}/g;
diff --git a/src/frontend/src/modals/formModal/index.tsx b/src/frontend/src/modals/formModal/index.tsx
index 751eacea1f..81b15559cf 100644
--- a/src/frontend/src/modals/formModal/index.tsx
+++ b/src/frontend/src/modals/formModal/index.tsx
@@ -67,14 +67,17 @@ export default function FormModal({
   const id = useRef(flow.id);
   const tabsStateFlowId = tabsState[flow.id];
   const tabsStateFlowIdFormKeysData = tabsStateFlowId.formKeysData;
-  const [chatKey, setChatKey] = useState(
-    Object.keys(tabsState[flow.id].formKeysData.input_keys).find(
-      (k) =>
-        !tabsState[flow.id].formKeysData.handle_keys.some((j) => j === k) &&
-        tabsState[flow.id].formKeysData.input_keys[k] === ""
-    )
-  );
-
+  const [chatKey, setChatKey] = useState(() => {
+    if (tabsState[flow.id]?.formKeysData?.input_keys) {
+      return Object.keys(tabsState[flow.id].formKeysData.input_keys).find(
+        (k) =>
+          !tabsState[flow.id].formKeysData.handle_keys.some((j) => j === k) &&
+          tabsState[flow.id].formKeysData.input_keys[k] === ""
+      );
+    }
+    // TODO: return a sensible default
+    return "";
+  });
   useEffect(() => {
     if (messagesRef.current) {
       messagesRef.current.scrollTop = messagesRef.current.scrollHeight;
@@ -419,68 +422,70 @@
- {Object.keys(tabsState[id.current].formKeysData.input_keys).map( - (i, k) => ( -
- - - {i} - + {tabsState[id.current]?.formKeysData?.input_keys + ? Object.keys( + tabsState[id.current].formKeysData.input_keys + ).map((i, k) => ( +
+ + + {i} + -
{ - event.stopPropagation(); - }} - > - - handleOnCheckedChange(value, i) - } - size="small" - disabled={tabsState[ - id.current - ].formKeysData.handle_keys.some((t) => t === i)} - /> +
{ + event.stopPropagation(); + }} + > + + handleOnCheckedChange(value, i) + } + size="small" + disabled={tabsState[ + id.current + ].formKeysData.handle_keys.some((t) => t === i)} + /> +
+ } + key={k} + keyValue={i} + > +
+ {tabsState[id.current].formKeysData.handle_keys.some( + (t) => t === i + ) && ( +
+ Source: Component +
+ )} +
- } - key={k} - keyValue={i} - > -
- {tabsState[id.current].formKeysData.handle_keys.some( - (t) => t === i - ) && ( -
- Source: Component -
- )} - -
-
-
- ) - )} +
+
+ )) + : null} {tabsState[id.current].formKeysData.memory_keys.map((i, k) => (

@@ -377,11 +399,9 @@ chat_input_field: Input key that you want the chat to send the user message with
 ${
   tabsState[flow.id] && tabsState[flow.id].formKeysData
     ? `chat_inputs='${inputs}'
-  chat_input_field="${
-    Object.keys(tabsState[flow.id].formKeysData.input_keys)[0]
-  }"
+  chat_input_field="${chat_input_field}"
 `
     : ""
-  }host_url="http://localhost:7860"
+  }host_url="http://localhost:7860"
 >`;
 }
diff --git a/src/frontend/tailwind.config.js b/src/frontend/tailwind.config.js
index ea683e8440..52330ae92a 100644
--- a/src/frontend/tailwind.config.js
+++ b/src/frontend/tailwind.config.js
@@ -155,7 +155,9 @@ module.exports = {
           overflow: "hidden",
           "text-overflow": "ellipsis",
         },
-
+        ".word-break-break-word": {
+          wordBreak: "break-word",
+        },
         ".arrow-hide": {
           "&::-webkit-inner-spin-button": {
             "-webkit-appearance": "none",
diff --git a/tests/conftest.py b/tests/conftest.py
index 45a8f8f1f3..e90d03d0a1 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,7 +1,7 @@
 from contextlib import contextmanager
 import json
 from pathlib import Path
-from typing import AsyncGenerator
+from typing import AsyncGenerator, TYPE_CHECKING
 
 from langflow.api.v1.flows import get_session
 from langflow.graph.graph.base import Graph
@@ -10,6 +10,10 @@
 from httpx import AsyncClient
 from sqlmodel import SQLModel, Session, create_engine
 from sqlmodel.pool import StaticPool
+from typer.testing import CliRunner
+
+if TYPE_CHECKING:
+    from langflow.services.database.manager import DatabaseManager
 
 
 def pytest_configure():
@@ -114,8 +118,8 @@ def get_session_override():
 
     app = create_app()
     app.dependency_overrides[get_session] = get_session_override
-
-    yield TestClient(app)
+    with TestClient(app) as client:
+        yield client
 
     app.dependency_overrides.clear()
@@ -134,15 +138,20 @@ def get_session_override():
 
 # create a fixture for session_getter above
 @pytest.fixture(name="session_getter")
-def session_getter_fixture():
+def session_getter_fixture(client):
     engine = create_engine(
         "sqlite://", connect_args={"check_same_thread": False}, poolclass=StaticPool
     )
     SQLModel.metadata.create_all(engine)
 
     @contextmanager
-    def blank_session_getter():
-        with Session(engine) as session:
+    def blank_session_getter(db_manager: "DatabaseManager"):
+        with Session(db_manager.engine) as session:
             yield session
 
     yield blank_session_getter
+
+
+@pytest.fixture
+def runner():
+    return CliRunner()
diff --git a/tests/test_cache_manager.py b/tests/test_cache_manager.py
index f3e65481e8..660512634e 100644
--- a/tests/test_cache_manager.py
+++ b/tests/test_cache_manager.py
@@ -2,7 +2,7 @@
 import pandas as pd
 import pytest
-from langflow.cache.manager import CacheManager
+from langflow.services.cache.manager import CacheManager
 from PIL import Image
diff --git a/tests/test_cli.py b/tests/test_cli.py
new file mode 100644
index 0000000000..408500d7a6
--- /dev/null
+++ b/tests/test_cli.py
@@ -0,0 +1,30 @@
+from pathlib import Path
+from tempfile import gettempdir
+from langflow.__main__ import app
+import pytest
+
+from langflow.services import utils
+
+
+@pytest.fixture(scope="module")
+def default_settings():
+    return [
+        "--backend-only",
+        "--no-open-browser",
+    ]
+
+
+def test_components_path(runner, client, default_settings):
+    # Create a folder in the tmp directory
+    temp_dir = Path(gettempdir())
+    # create a "components" folder
+    temp_dir = temp_dir / "components"
+    temp_dir.mkdir(exist_ok=True)
+
+    result = runner.invoke(
+        app,
+        ["--components-path", str(temp_dir), *default_settings],
+    )
+    assert result.exit_code == 0, result.stdout
+    settings_manager = utils.get_settings_manager()
+    assert temp_dir in settings_manager.settings.COMPONENTS_PATH
diff --git a/tests/test_custom_component.py b/tests/test_custom_component.py
index 199906dda8..f20311ceca 100644
--- a/tests/test_custom_component.py
+++ b/tests/test_custom_component.py
@@ -5,7 +5,7 @@
 from fastapi import HTTPException
 
-from langflow.database.models.flow import Flow, FlowCreate
+from langflow.services.database.models.flow import Flow, FlowCreate
 from langflow.interface.custom.base import CustomComponent
 from langflow.interface.custom.component import (
     Component,
diff --git a/tests/test_database.py b/tests/test_database.py
index bc512b6b0f..52a5daa4c0 100644
--- a/tests/test_database.py
+++ b/tests/test_database.py
@@ -5,16 +5,9 @@
 from sqlalchemy.orm import Session
 from fastapi.testclient import TestClient
 
-from fastapi.encoders import jsonable_encoder
 from langflow.api.v1.schemas import FlowListCreate
-from langflow.database.models.flow import Flow, FlowCreate, FlowUpdate
-
-from langflow.database.models.flow_style import (
-    FlowStyleCreate,
-    FlowStyleRead,
-    FlowStyleUpdate,
-)
+from langflow.services.database.models.flow import Flow, FlowCreate, FlowUpdate
@@ -56,33 +49,12 @@ def test_read_flows(client: TestClient, json_flow: str):
     assert response.json()["name"] == flow.name
     assert response.json()["data"] == flow.data
 
-    flow_style = FlowStyleCreate(color="red", emoji="👍", flow_id=response.json()["id"])
-    response = client.post(
-        "api/v1/flow_styles/", json=jsonable_encoder(flow_style.dict())
-    )
-    assert response.status_code == 200
-    assert response.json()["color"] == flow_style.color
-    assert response.json()["emoji"] == flow_style.emoji
-    assert response.json()["flow_id"] == str(flow_style.flow_id)
-
     flow = FlowCreate(name="Test Flow", description="description", data=data)
     response = client.post("api/v1/flows/", json=flow.dict())
     assert response.status_code == 201
     assert response.json()["name"] == flow.name
     assert response.json()["data"] == flow.data
 
-    # Now we need to create FlowStyle objects for each Flow
-    flow_style = FlowStyleCreate(
-        color="green", emoji="👍", flow_id=response.json()["id"]
-    )
-    response = client.post(
-        "api/v1/flow_styles/", json=jsonable_encoder(flow_style.dict())
-    )
-    assert response.status_code == 200
-    assert response.json()["color"] == flow_style.color
-    assert response.json()["emoji"] == flow_style.emoji
-    assert response.json()["flow_id"] == str(flow_style.flow_id)
-
     response = client.get("api/v1/flows/")
     assert response.status_code == 200
     assert len(response.json()) > 0
@@ -97,21 +69,10 @@ def test_read_flow(client: TestClient, json_flow: str):
     # turn it into a UUID
     flow_id = UUID(flow_id)
 
-    flow_style = FlowStyleCreate(color="green", emoji="👍", flow_id=flow_id)
-    response = client.post(
-        "api/v1/flow_styles/", json=jsonable_encoder(flow_style.dict())
-    )
-    assert response.status_code == 200
-    response_json = response.json()
-    assert response_json["color"] == flow_style.color
-    assert response_json["emoji"] == flow_style.emoji
-    assert response_json["flow_id"] == str(flow_style.flow_id)
-
     response = client.get(f"api/v1/flows/{flow_id}")
     assert response.status_code == 200
     assert response.json()["name"] == flow.name
     assert response.json()["data"] == flow.data
-    assert response.json()["style"]["color"] == flow_style.color
 
 
 def test_update_flow(client: TestClient, json_flow: str):
@@ -275,66 +236,3 @@ def test_read_empty_flows(client: TestClient):
     response = client.get("api/v1/flows/")
     assert response.status_code == 200
     assert len(response.json()) == 0
-
-
-def test_create_flow_style(client: TestClient):
-    flow_style = FlowStyleCreate(color="red", emoji="🔴")
-    response = client.post("api/v1/flow_styles/", json=flow_style.dict())
-    assert response.status_code == 200
-    created_flow_style = FlowStyleRead(**response.json())
-    assert created_flow_style.color == flow_style.color
-    assert created_flow_style.emoji == flow_style.emoji
-
-
-def test_read_flow_styles(client: TestClient):
-    response = client.get("api/v1/flow_styles/")
-    assert response.status_code == 200
-    flow_styles = [FlowStyleRead(**flow_style) for flow_style in response.json()]
-    assert not flow_styles
-    # Create test data
-    flow_style = FlowStyleCreate(color="red", emoji="🔴")
-    response = client.post("api/v1/flow_styles/", json=flow_style.dict())
-    assert response.status_code == 200
-    # Check response data
-    response = client.get("api/v1/flow_styles/")
-    assert response.status_code == 200
-    flow_styles = [FlowStyleRead(**flow_style) for flow_style in response.json()]
-    assert len(flow_styles) == 1
-    assert flow_styles[0].color == flow_style.color
-    assert flow_styles[0].emoji == flow_style.emoji
-
-
-def test_read_flow_style(client: TestClient):
-    flow_style = FlowStyleCreate(color="red", emoji="🔴")
-    response = client.post("api/v1/flow_styles/", json=flow_style.dict())
-    created_flow_style = FlowStyleRead(**response.json())
-    response = client.get(f"api/v1/flow_styles/{created_flow_style.id}")
-    assert response.status_code == 200
-    read_flow_style = FlowStyleRead(**response.json())
-    assert read_flow_style == created_flow_style
-
-
-def test_update_flow_style(client: TestClient):
-    flow_style = FlowStyleCreate(color="red", emoji="🔴")
-    response = client.post("api/v1/flow_styles/", json=flow_style.dict())
-    created_flow_style = FlowStyleRead(**response.json())
-    to_update_flow_style = FlowStyleUpdate(color="blue")
-    response = client.patch(
-        f"api/v1/flow_styles/{created_flow_style.id}", json=to_update_flow_style.dict()
-    )
-    assert response.status_code == 200
-    updated_flow_style = FlowStyleRead(**response.json())
-    assert updated_flow_style.color == "blue"
-    assert updated_flow_style.emoji == flow_style.emoji
-
-
-def test_delete_flow_style(client: TestClient):
-    flow_style = FlowStyleCreate(color="red", emoji="🔴")
-    response = client.post("api/v1/flow_styles/", json=flow_style.dict())
-    created_flow_style = FlowStyleRead(**response.json())
-    response = client.delete(f"api/v1/flow_styles/{created_flow_style.id}")
-    assert response.status_code == 200
-    assert response.json() == {"message": "FlowStyle deleted successfully"}
-
-    response = client.get(f"api/v1/flow_styles/{created_flow_style.id}")
-    assert response.status_code == 404
diff --git a/tests/test_graph.py b/tests/test_graph.py
index 228bbb4d6f..f3efe36142 100644
--- a/tests/test_graph.py
+++ b/tests/test_graph.py
@@ -12,7 +12,6 @@
     FileToolVertex,
     LLMVertex,
     ToolkitVertex,
-    WrapperVertex,
 )
 from langflow.processing.process import get_result_and_thought
 from langflow.utils.payload import get_root_node
@@ -292,11 +291,11 @@ def test_file_tool_node_build(openapi_graph):
     assert not Path(file_path).exists()
 
 
-def test_wrapper_node_build(openapi_graph):
-    wrapper_node = get_node_by_type(openapi_graph, WrapperVertex)
-    assert wrapper_node is not None
-    built_object = wrapper_node.build()
-    assert built_object is not None
+# def test_wrapper_node_build(openapi_graph):
+#     wrapper_node = get_node_by_type(openapi_graph, WrapperVertex)
+#     assert wrapper_node is not None
+#     built_object = wrapper_node.build()
+#     assert built_object is not None
 
 
 def test_get_result_and_thought(basic_graph):
diff --git a/tests/test_llms_template.py b/tests/test_llms_template.py
index 6bb1bc28dc..f1b76e18ee 100644
--- a/tests/test_llms_template.py
+++ b/tests/test_llms_template.py
@@ -1,13 +1,14 @@
 from fastapi.testclient import TestClient
-from langflow.settings import settings
+from langflow.services.utils import get_settings_manager
 
 
 def test_llms_settings(client: TestClient):
+    settings_manager = get_settings_manager()
     response = client.get("api/v1/all")
     assert response.status_code == 200
     json_response = response.json()
     llms = json_response["llms"]
-    assert set(llms.keys()) == set(settings.llms)
+    assert set(llms.keys()) == set(settings_manager.settings.LLMS)
 
 
 # def test_hugging_face_hub(client: TestClient):
diff --git a/tests/test_prompts_template.py b/tests/test_prompts_template.py
index afc595a414..dde313c20c 100644
--- a/tests/test_prompts_template.py
+++ b/tests/test_prompts_template.py
@@ -1,13 +1,14 @@
 from fastapi.testclient import TestClient
-from langflow.settings import settings
+from langflow.services.utils import get_settings_manager
 
 
 def test_prompts_settings(client: TestClient):
+    settings_manager = get_settings_manager()
     response = client.get("api/v1/all")
     assert response.status_code == 200
     json_response = response.json()
     prompts = json_response["prompts"]
-    assert set(prompts.keys()) == set(settings.prompts)
+    assert set(prompts.keys()) == set(settings_manager.settings.PROMPTS)
 
 
 def test_prompt_template(client: TestClient):
diff --git a/tests/test_vectorstore_template.py b/tests/test_vectorstore_template.py
index 0aa8237864..6ae4843ac9 100644
--- a/tests/test_vectorstore_template.py
+++ b/tests/test_vectorstore_template.py
@@ -1,12 +1,13 @@
 from fastapi.testclient import TestClient
-from langflow.settings import settings
+from langflow.services.utils import get_settings_manager
 
 
 # check that all agents are in settings.agents
 # are in json_response["agents"]
 def test_vectorstores_settings(client: TestClient):
+    settings_manager = get_settings_manager()
     response = client.get("api/v1/all")
     assert response.status_code == 200
     json_response = response.json()
     vectorstores = json_response["vectorstores"]
-    assert set(vectorstores.keys()) == set(settings.vectorstores)
+    assert set(vectorstores.keys()) == set(settings_manager.settings.VECTORSTORES)
diff --git a/tests/test_websocket.py b/tests/test_websocket.py
index 57a0e95f68..dd668c287d 100644
--- a/tests/test_websocket.py
+++ b/tests/test_websocket.py
@@ -1,6 +1,6 @@
 from fastapi import WebSocketDisconnect
 
-# from langflow.chat.manager import ChatManager
+# from langflow.services.chat.manager import ChatManager
 import pytest
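
A closing note on the settings refactor exercised by the tests above: the module-level singleton (`langflow.settings.settings`) is replaced by service-manager lookups. The following is a minimal sketch of the new access pattern, assuming the service manager has already been initialized at startup (as `create_app()` does for the `client` fixture in tests/conftest.py); the route itself is hypothetical and not part of this patch:

from fastapi import Depends, FastAPI
from sqlmodel import Session

from langflow.services.utils import get_session, get_settings_manager

app = FastAPI()


@app.get("/components-path")  # hypothetical route, for illustration only
def read_components_path(session: Session = Depends(get_session)):
    # get_session yields a session from the DATABASE_MANAGER service; it is
    # shown here only to illustrate the dependency that tests/conftest.py overrides.
    settings_manager = get_settings_manager()
    # COMPONENTS_PATH is the uppercase pydantic field asserted in tests/test_cli.py.
    return {"components_path": [str(path) for path in settings_manager.settings.COMPONENTS_PATH]}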