diff --git a/.gitignore b/.gitignore index 9d71e19b26..3b6cfebbfe 100644 --- a/.gitignore +++ b/.gitignore @@ -251,3 +251,5 @@ langflow.db # docusaurus .docusaurus/ + +/tmp/* diff --git a/.vscode/launch.json b/.vscode/launch.json index 3b458aa81d..e09e76cc87 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -6,7 +6,8 @@ "request": "launch", "module": "uvicorn", "args": [ - "langflow.main:app", + "--factory", + "langflow.main:create_app", "--port", "7860", "--reload", diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index f2c471b1a1..da7ec1977f 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -9,7 +9,7 @@ Please do not try to push directly to this repo unless you are a maintainer. ## ๐Ÿ—บ๏ธContributing Guidelines -### ๐ŸšฉGitHub Issues +## ๐ŸšฉGitHub Issues Our [issues](https://github.com/logspace-ai/langflow/issues) page is kept up to date with bugs, improvements, and feature requests. There is a taxonomy of labels to help @@ -33,18 +33,19 @@ so that more people can benefit from it. [collapses the content](https://developer.mozilla.org/en/docs/Web/HTML/Element/details) so it only becomes visible on click, making the issue easier to read and follow. -### Issue labels +## Issue labels [See this page](https://github.com/logspace-ai/langflow/labels) for an overview of the system we use to tag our issues and pull requests. +## Local development -### Local development You can develop Langflow using docker compose, or locally. We provide a .vscode/launch.json file for debugging the backend in VSCode, which is a lot faster than using docker compose. Setting up hooks: + ```bash make init ``` @@ -53,30 +54,46 @@ This will install the pre-commit hooks, which will run `make format` on every co It is advised to run `make lint` before pushing to the repository. -#### **Locally** -Run locally by cloning the repository and installing the dependencies. We recommend using a virtual environment to isolate the dependencies from your system. 
+## Run locally + +Langflow can run locally by cloning the repository and installing the dependencies. We recommend using a virtual environment to isolate the dependencies from your system. Before you start, make sure you have the following installed: - - Poetry (>=1.4) - - Node.js -For the backend, you will need to install the dependencies and start the development server. +- Poetry (>=1.4) +- Node.js + +Then, in the root folder, install the dependencies and start the development server for the backend: + ```bash -make install_backend make backend ``` -For the frontend, you will need to install the dependencies and start the development server. + +And the frontend: + ```bash make frontend ``` +## Docker compose + +The following snippet will run the backend and frontend in separate containers. The frontend will be available at `localhost:3000` and the backend at `localhost:7860`. -#### **Docker compose** -This will run the backend and frontend in separate containers. The frontend will be available at `localhost:3000` and the backend at `localhost:7860`. ```bash docker compose up --build # or make dev build=1 ``` +## Documentation + +The documentation is built using [Docusaurus](https://docusaurus.io/). To run the documentation locally, run the following commands: + +```bash +cd docs +npm install +npm run start +``` +The documentation will be available at `localhost:3000` and all the files are located in the `docs/docs` folder. +Once you are done with your changes, you can create a Pull Request to the `main` branch. 
diff --git a/Makefile b/Makefile index 79e27833ec..ff540da0d5 100644 --- a/Makefile +++ b/Makefile @@ -46,7 +46,7 @@ install_backend: backend: make install_backend - poetry run uvicorn src.backend.langflow.main:app --port 7860 --reload --log-level debug + poetry run uvicorn --factory src.backend.langflow.main:create_app --port 7860 --reload --log-level debug build_and_run: echo 'Removing dist folder' diff --git a/README.md b/README.md index e88f9c762b..dcd91e0755 100644 --- a/README.md +++ b/README.md @@ -30,14 +30,14 @@ - [Table of Contents](#table-of-contents) - [๐Ÿ“ฆ Installation](#-installation) - [Locally](#locally) - - [HuggingFace](#huggingface) + - [HuggingFace Spaces](#huggingface-spaces) - [๐Ÿ–ฅ๏ธ Command Line Interface (CLI)](#๏ธ-command-line-interface-cli) - [Usage](#usage) - - [Environment Variables](#environment-variables) + - [Environment Variables](#environment-variables) - [Deployment](#deployment) - [Deploy Langflow on Google Cloud Platform](#deploy-langflow-on-google-cloud-platform) - [Deploy Langflow on Jina AI Cloud](#deploy-langflow-on-jina-ai-cloud) - - [API Usage](#api-usage) + - [API Usage](#api-usage) - [Deploy on Railway](#deploy-on-railway) - [Deploy on Render](#deploy-on-render) - [๐ŸŽจ Creating Flows](#-creating-flows) @@ -46,7 +46,7 @@ # ๐Ÿ“ฆ Installation -## Locally +### Locally You can install Langflow from pip: @@ -81,7 +81,7 @@ or langflow # or langflow --help ``` -## HuggingFace +### HuggingFace Spaces You can also check it out on [HuggingFace Spaces](https://huggingface.co/spaces/Logspace/Langflow) and run it in your browser! You can even clone it and have your own copy of Langflow to play with. @@ -107,6 +107,7 @@ Each option is detailed below: - `--config`: Defines the path to the configuration file. The default is `config.yaml`. - `--env-file`: Specifies the path to the .env file containing environment variables. The default is `.env`. - `--log-level`: Defines the logging level. 
Can be set using the `LANGFLOW_LOG_LEVEL` environment variable. The default is `critical`. +- `--components-path`: Specifies the path to the directory containing custom components. Can be set using the `LANGFLOW_COMPONENTS_PATH` environment variable. The default is `langflow/components`. - `--log-file`: Specifies the path to the log file. Can be set using the `LANGFLOW_LOG_FILE` environment variable. The default is `logs/langflow.log`. - `--cache`: Selects the type of cache to use. Options are `InMemoryCache` and `SQLiteCache`. Can be set using the `LANGFLOW_LANGCHAIN_CACHE` environment variable. The default is `SQLiteCache`. - `--jcloud/--no-jcloud`: Toggles the option to deploy on Jina AI Cloud. The default is `no-jcloud`. @@ -161,33 +162,33 @@ langflow --jcloud
Show complete (example) output - ```text - ๐Ÿš€ Deploying Langflow server on Jina AI Cloud - โ•ญโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ ๐ŸŽ‰ Flow is available! โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ•ฎ - โ”‚ โ”‚ - โ”‚ ID langflow-e3dd8820ec โ”‚ - โ”‚ Gateway (Websocket) wss://langflow-e3dd8820ec.wolf.jina.ai โ”‚ - โ”‚ Dashboard https://dashboard.wolf.jina.ai/flow/e3dd8820ec โ”‚ - โ”‚ โ”‚ - โ•ฐโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ•ฏ - โ•ญโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ฌโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ•ฎ - โ”‚ App ID โ”‚ langflow-e3dd8820ec โ”‚ - โ”œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ผโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ค - โ”‚ Phase โ”‚ Serving โ”‚ - โ”œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ผโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ค - โ”‚ Endpoint โ”‚ wss://langflow-e3dd8820ec.wolf.jina.ai โ”‚ - โ”œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ผโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ค - โ”‚ App logs โ”‚ 
dashboards.wolf.jina.ai โ”‚ - โ”œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ผโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ค - โ”‚ Swagger UI โ”‚ https://langflow-e3dd8820ec.wolf.jina.ai/docs โ”‚ - โ”œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ผโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ค - โ”‚ OpenAPI JSON โ”‚ https://langflow-e3dd8820ec.wolf.jina.ai/openapi.json โ”‚ - โ•ฐโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ดโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ•ฏ - - ๐ŸŽ‰ Langflow server successfully deployed on Jina AI Cloud ๐ŸŽ‰ - ๐Ÿ”— Click on the link to open the server (please allow ~1-2 minutes for the server to startup): https://langflow-e3dd8820ec.wolf.jina.ai/ - ๐Ÿ“– Read more about managing the server: https://github.com/jina-ai/langchain-serve - ``` +```text + ๐Ÿš€ Deploying Langflow server on Jina AI Cloud + โ•ญโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€ ๐ŸŽ‰ Flow is available! 
โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ•ฎ + โ”‚ โ”‚ + โ”‚ ID langflow-e3dd8820ec โ”‚ + โ”‚ Gateway (Websocket) wss://langflow-e3dd8820ec.wolf.jina.ai โ”‚ + โ”‚ Dashboard https://dashboard.wolf.jina.ai/flow/e3dd8820ec โ”‚ + โ”‚ โ”‚ + โ•ฐโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ•ฏ + โ•ญโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ฌโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ•ฎ + โ”‚ App ID โ”‚ langflow-e3dd8820ec โ”‚ + โ”œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ผโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ค + โ”‚ Phase โ”‚ Serving โ”‚ + โ”œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ผโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ค + โ”‚ Endpoint โ”‚ wss://langflow-e3dd8820ec.wolf.jina.ai โ”‚ + โ”œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ผโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ค + โ”‚ App logs โ”‚ dashboards.wolf.jina.ai โ”‚ + 
โ”œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ผโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ค + โ”‚ Swagger UI โ”‚ https://langflow-e3dd8820ec.wolf.jina.ai/docs โ”‚ + โ”œโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ผโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ค + โ”‚ OpenAPI JSON โ”‚ https://langflow-e3dd8820ec.wolf.jina.ai/openapi.json โ”‚ + โ•ฐโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”ดโ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ”€โ•ฏ + + ๐ŸŽ‰ Langflow server successfully deployed on Jina AI Cloud ๐ŸŽ‰ + ๐Ÿ”— Click on the link to open the server (please allow ~1-2 minutes for the server to startup): https://langflow-e3dd8820ec.wolf.jina.ai/ + ๐Ÿ“– Read more about managing the server: https://github.com/jina-ai/langchain-serve +```
@@ -198,7 +199,7 @@ You can use Langflow directly on your browser, or use the API endpoints on Jina
Show API usage (with python) - ```python +```python import requests BASE_API_URL = "https://langflow-e3dd8820ec.wolf.jina.ai/api/v1/predict" @@ -206,38 +207,38 @@ FLOW_ID = "864c4f98-2e59-468b-8e13-79cd8da07468" # You can tweak the flow by adding a tweaks dictionary # e.g {"OpenAI-XXXXX": {"model_name": "gpt-4"}} TWEAKS = { - "ChatOpenAI-g4jEr": {}, - "ConversationChain-UidfJ": {} +"ChatOpenAI-g4jEr": {}, +"ConversationChain-UidfJ": {} } def run_flow(message: str, flow_id: str, tweaks: dict = None) -> dict: - """ - Run a flow with a given message and optional tweaks. + """ + Run a flow with a given message and optional tweaks. - :param message: The message to send to the flow - :param flow_id: The ID of the flow to run - :param tweaks: Optional tweaks to customize the flow - :return: The JSON response from the flow - """ - api_url = f"{BASE_API_URL}/{flow_id}" + :param message: The message to send to the flow + :param flow_id: The ID of the flow to run + :param tweaks: Optional tweaks to customize the flow + :return: The JSON response from the flow + """ + api_url = f"{BASE_API_URL}/{flow_id}" - payload = {"message": message} + payload = {"message": message} - if tweaks: - payload["tweaks"] = tweaks + if tweaks: + payload["tweaks"] = tweaks - response = requests.post(api_url, json=payload) - return response.json() + response = requests.post(api_url, json=payload) + return response.json() # Setup any tweaks you want to apply to the flow print(run_flow("Your message", flow_id=FLOW_ID, tweaks=TWEAKS)) - ``` +``` - ```json - { - "result": "Great choice! Bangalore in the 1920s was a vibrant city with a rich cultural and political scene. Here are some suggestions for things to see and do:\n\n1. Visit the Bangalore Palace - built in 1887, this stunning palace is a perfect example of Tudor-style architecture. It was home to the Maharaja of Mysore and is now open to the public.\n\n2. 
Attend a performance at the Ravindra Kalakshetra - this cultural center was built in the 1920s and is still a popular venue for music and dance performances.\n\n3. Explore the neighborhoods of Basavanagudi and Malleswaram - both of these areas have retained much of their old-world charm and are great places to walk around and soak up the atmosphere.\n\n4. Check out the Bangalore Club - founded in 1868, this exclusive social club was a favorite haunt of the British expat community in the 1920s.\n\n5. Attend a meeting of the Indian National Congress - founded in 1885, the INC was a major force in the Indian independence movement and held many meetings and rallies in Bangalore in the 1920s.\n\nHope you enjoy your trip to 1920s Bangalore!" - } - ``` +```json +{ + "result": "Great choice! Bangalore in the 1920s was a vibrant city with a rich cultural and political scene. Here are some suggestions for things to see and do:\n\n1. Visit the Bangalore Palace - built in 1887, this stunning palace is a perfect example of Tudor-style architecture. It was home to the Maharaja of Mysore and is now open to the public.\n\n2. Attend a performance at the Ravindra Kalakshetra - this cultural center was built in the 1920s and is still a popular venue for music and dance performances.\n\n3. Explore the neighborhoods of Basavanagudi and Malleswaram - both of these areas have retained much of their old-world charm and are great places to walk around and soak up the atmosphere.\n\n4. Check out the Bangalore Club - founded in 1868, this exclusive social club was a favorite haunt of the British expat community in the 1920s.\n\n5. Attend a meeting of the Indian National Congress - founded in 1885, the INC was a major force in the Indian independence movement and held many meetings and rallies in Bangalore in the 1920s.\n\nHope you enjoy your trip to 1920s Bangalore!" +} +```
diff --git a/docs/docs/components/chains.mdx b/docs/docs/components/chains.mdx index 52de6c4819..96dcac2d0f 100644 --- a/docs/docs/components/chains.mdx +++ b/docs/docs/components/chains.mdx @@ -1,6 +1,7 @@ import ThemedImage from "@theme/ThemedImage"; import useBaseUrl from "@docusaurus/useBaseUrl"; import ZoomableImage from "/src/theme/ZoomableImage.js"; +import Admonition from "@theme/Admonition"; # Chains @@ -12,22 +13,23 @@ Chains, in the context of language models, refer to a series of calls made to a The `CombineDocsChain` incorporates methods to combine or aggregate loaded documents for question-answering functionality. -:::info + Works as a proxy of LangChainโ€™s [documents](https://python.langchain.com/docs/modules/chains/document/) chains generated by the `load_qa_chain` function. -::: + **Params** - **LLM:** Language Model to use in the chain. - **chain_type:** The chain type to be used. Each one of them applies a different โ€œcombination strategyโ€. - - **stuff**: The stuff [documents](https://python.langchain.com/docs/modules/chains/document/stuff) chain (โ€œstuff" as in "to stuff" or "to fill") is the most straightforward of *the* document chains. It takes a list of documents, inserts them all into a prompt, and passes that prompt to an LLM. This chain is well-suited for applications where documents are small and only a few are passed in for most calls. - - **map_reduce**: The map-reduce [documents](https://python.langchain.com/docs/modules/chains/document/map_reduce) chain first applies an LLM chain to each document individually (the Map step), treating the chain output as a new document. It then passes all the new documents to a separate combined documents chain to get a single output (the Reduce step). It can optionally first compress or collapse the mapped documents to make sure that they fit in the combined documents chain (which will often pass them to an LLM). This compression step is performed recursively if necessary. 
- - **map_rerank**: The map re-rank [documents](https://python.langchain.com/docs/modules/chains/document/map_rerank) chain runs an initial prompt on each document that not only tries to complete a task but also gives a score for how certain it is in its answer. The highest-scoring response is returned. - - **refine**: The refine [documents](https://python.langchain.com/docs/modules/chains/document/refine) chain constructs a response by looping over the input documents and iteratively updating its answer. For each document, it passes all non-document inputs, the current document, and the latest intermediate answer to an LLM chain to get a new answer. - Since the Refine chain only passes a single document to the LLM at a time, it is well-suited for tasks that require analyzing more documents than can fit in the model's context. The obvious tradeoff is that this chain will make far more LLM calls than, for example, the Stuff documents chain. There are also certain tasks that are difficult to accomplish iteratively. For example, the Refine chain can perform poorly when documents frequently cross-reference one another or when a task requires detailed information from many documents. + - **stuff**: The stuff [documents](https://python.langchain.com/docs/modules/chains/document/stuff) chain (โ€œstuff" as in "to stuff" or "to fill") is the most straightforward of _the_ document chains. It takes a list of documents, inserts them all into a prompt, and passes that prompt to an LLM. This chain is well-suited for applications where documents are small and only a few are passed in for most calls. + - **map_reduce**: The map-reduce [documents](https://python.langchain.com/docs/modules/chains/document/map_reduce) chain first applies an LLM chain to each document individually (the Map step), treating the chain output as a new document. It then passes all the new documents to a separate combined documents chain to get a single output (the Reduce step). 
It can optionally first compress or collapse the mapped documents to make sure that they fit in the combined documents chain (which will often pass them to an LLM). This compression step is performed recursively if necessary. + - **map_rerank**: The map re-rank [documents](https://python.langchain.com/docs/modules/chains/document/map_rerank) chain runs an initial prompt on each document that not only tries to complete a task but also gives a score for how certain it is in its answer. The highest-scoring response is returned. + - **refine**: The refine [documents](https://python.langchain.com/docs/modules/chains/document/refine) chain constructs a response by looping over the input documents and iteratively updating its answer. For each document, it passes all non-document inputs, the current document, and the latest intermediate answer to an LLM chain to get a new answer. + + Since the Refine chain only passes a single document to the LLM at a time, it is well-suited for tasks that require analyzing more documents than can fit in the model's context. The obvious tradeoff is that this chain will make far more LLM calls than, for example, the Stuff documents chain. There are also certain tasks that are difficult to accomplish iteratively. For example, the Refine chain can perform poorly when documents frequently cross-reference one another or when a task requires detailed information from many documents. --- @@ -41,7 +43,7 @@ The `ConversationChain` is a straightforward chain for interactive conversations - **Memory:** Default memory store. - **input_key:** Used to specify the key under which the user input will be stored in the conversation memory. It allows you to provide the user's input to the chain for processing and generating a response. - **output_key:** Used to specify the key under which the generated response will be stored in the conversation memory. It allows you to retrieve the response using the specified key. 
-- **verbose:** This parameter is used to control the level of detail in the output of the chain. When set to True, it will print out some internal states of the chain while it is being run, which can be helpful for debugging and understanding the chain's behavior. If set to False, it will suppress the verbose output โ€” defaults to `False`. +- **verbose:** This parameter is used to control the level of detail in the output of the chain. When set to True, it will print out some internal states of the chain while it is being run, which can be helpful for debugging and understanding the chain's behavior. If set to False, it will suppress the verbose output โ€” defaults to `False`. --- @@ -49,11 +51,11 @@ The `ConversationChain` is a straightforward chain for interactive conversations The `ConversationalRetrievalChain` extracts information and provides answers by combining document search and question-answering abilities. -:::info + A retriever is a component that finds documents based on a query. It doesn't store the documents themselves, but it returns the ones that match the query. -::: + **Params** @@ -61,12 +63,13 @@ A retriever is a component that finds documents based on a query. It doesn't sto - **Memory:** Default memory store. - **Retriever:** The retriever used to fetch relevant documents. - **chain_type:** The chain type to be used. Each one of them applies a different โ€œcombination strategyโ€. - - **stuff**: The stuff [documents](https://python.langchain.com/docs/modules/chains/document/stuff) chain (โ€œstuff" as in "to stuff" or "to fill") is the most straightforward of *the* document chains. It takes a list of documents, inserts them all into a prompt, and passes that prompt to an LLM. This chain is well-suited for applications where documents are small and only a few are passed in for most calls. 
- - **map_reduce**: The map-reduce [documents](https://python.langchain.com/docs/modules/chains/document/map_reduce) chain first applies an LLM chain to each document individually (the Map step), treating the chain output as a new document. It then passes all the new documents to a separate combined documents chain to get a single output (the Reduce step). It can optionally first compress or collapse the mapped documents to make sure that they fit in the combined documents chain (which will often pass them to an LLM). This compression step is performed recursively if necessary. - - **map_rerank**: The map re-rank [documents](https://python.langchain.com/docs/modules/chains/document/map_rerank) chain runs an initial prompt on each document that not only tries to complete a task but also gives a score for how certain it is in its answer. The highest-scoring response is returned. - - **refine**: The refine [documents](https://python.langchain.com/docs/modules/chains/document/refine) chain constructs a response by looping over the input documents and iteratively updating its answer. For each document, it passes all non-document inputs, the current document, and the latest intermediate answer to an LLM chain to get a new answer. - Since the Refine chain only passes a single document to the LLM at a time, it is well-suited for tasks that require analyzing more documents than can fit in the model's context. The obvious tradeoff is that this chain will make far more LLM calls than, for example, the Stuff documents chain. There are also certain tasks that are difficult to accomplish iteratively. For example, the Refine chain can perform poorly when documents frequently cross-reference one another or when a task requires detailed information from many documents. + - **stuff**: The stuff [documents](https://python.langchain.com/docs/modules/chains/document/stuff) chain (โ€œstuff" as in "to stuff" or "to fill") is the most straightforward of _the_ document chains. 
It takes a list of documents, inserts them all into a prompt, and passes that prompt to an LLM. This chain is well-suited for applications where documents are small and only a few are passed in for most calls. + - **map_reduce**: The map-reduce [documents](https://python.langchain.com/docs/modules/chains/document/map_reduce) chain first applies an LLM chain to each document individually (the Map step), treating the chain output as a new document. It then passes all the new documents to a separate combined documents chain to get a single output (the Reduce step). It can optionally first compress or collapse the mapped documents to make sure that they fit in the combined documents chain (which will often pass them to an LLM). This compression step is performed recursively if necessary. + - **map_rerank**: The map re-rank [documents](https://python.langchain.com/docs/modules/chains/document/map_rerank) chain runs an initial prompt on each document that not only tries to complete a task but also gives a score for how certain it is in its answer. The highest-scoring response is returned. + - **refine**: The refine [documents](https://python.langchain.com/docs/modules/chains/document/refine) chain constructs a response by looping over the input documents and iteratively updating its answer. For each document, it passes all non-document inputs, the current document, and the latest intermediate answer to an LLM chain to get a new answer. + + Since the Refine chain only passes a single document to the LLM at a time, it is well-suited for tasks that require analyzing more documents than can fit in the model's context. The obvious tradeoff is that this chain will make far more LLM calls than, for example, the Stuff documents chain. There are also certain tasks that are difficult to accomplish iteratively. For example, the Refine chain can perform poorly when documents frequently cross-reference one another or when a task requires detailed information from many documents. 
- **return_source_documents:** Used to specify whether or not to include the source documents that were used to answer the question in the output. When set to `True`, source documents will be included in the output along with the generated answer. This can be useful for providing additional context or references to the user โ€” defaults to `True`. - **verbose:** Whether or not to run in verbose mode. In verbose mode, intermediate logs will be printed to the console โ€” defaults to `False`. @@ -108,17 +111,17 @@ The `LLMMathChain` works by using the language model with an `LLMChain` to under `RetrievalQA` is a chain used to find relevant documents or information to answer a given query. The retriever is responsible for returning the relevant documents based on the query, and the QA component then extracts the answer from those documents. The retrieval QA system combines the capabilities of both the retriever and the QA component to provide accurate and relevant answers to user queries. -:::info + A retriever is a component that finds documents based on a query. It doesn't store the documents themselves, but it returns the ones that match the query. -::: + **Params** - **Combine Documents Chain:** Chain to use to combine the documents. - **Memory:** Default memory store. -- **Retriever:** The retriever used to fetch relevant documents. +- **Retriever:** The retriever used to fetch relevant documents. - **input_key:** This parameter is used to specify the key in the input data that contains the question. It is used to retrieve the question from the input data and pass it to the question-answering model for generating the answer โ€” defaults to `query`. - **output_key:** This parameter is used to specify the key in the output data where the generated answer will be stored. It is used to retrieve the answer from the output data after the question-answering model has generated it โ€” defaults to `result`. 
- **return_source_documents:** Used to specify whether or not to include the source documents that were used to answer the question in the output. When set to `True`, source documents will be included in the output along with the generated answer. This can be useful for providing additional context or references to the user โ€” defaults to `True`. @@ -134,4 +137,4 @@ The `SQLDatabaseChain` finds answers to questions using a SQL database. It works - **Db:** SQL Database to connect to. - **LLM:** Language Model to use in the chain. -- **Prompt:** Prompt template to translate natural language to SQL. \ No newline at end of file +- **Prompt:** Prompt template to translate natural language to SQL. diff --git a/docs/docs/components/custom.mdx b/docs/docs/components/custom.mdx new file mode 100644 index 0000000000..ffa747c1bb --- /dev/null +++ b/docs/docs/components/custom.mdx @@ -0,0 +1,92 @@ +import Admonition from "@theme/Admonition"; + +# Custom Components + +Used to create a custom component, a special type of Langflow component that allows users to extend the functionality of the platform by creating their own reusable and configurable components from a Python script. + +To use a custom component, follow these steps: + +- Create a class that inherits from _`langflow.CustomComponent`_ and contains a _`build`_ method. +- Use arguments with [Type Annotations (or Type Hints)](https://docs.python.org/3/library/typing.html) of the _`build`_ method to create component fields. +- If applicable, use the _`build_config`_ method to customize how these fields look and behave. + + + +For an in-depth explanation of custom components, their rules, and applications, make sure to read [Custom Component guidelines](../guidelines/custom-component). + + + +**Params** + +- **Code:** The Python code to define the component. + +## The CustomComponent Class + +The CustomComponent class serves as the foundation for creating custom components. 
By inheriting this class, users can create new, configurable components, tailored to their specific requirements. + +**Methods** + +- **build**: This method is required within a Custom Component class. It defines the component's functionality and specifies how it processes input data to produce output data. This method is called when the component is built (i.e., when you click the _Build_ โšก button in the canvas). + + The type annotations of the _`build`_ instance method are used to create the fields of the component. + + | Supported Types | + | --------------------------------------------------------- | + | _`str`_, _`int`_, _`float`_, _`bool`_, _`list`_, _`dict`_ | + | _`langchain.chains.base.Chain`_ | + | _`langchain.PromptTemplate`_ | + | _`langchain.llms.base.BaseLLM`_ | + | _`langchain.Tool`_ | + | _`langchain.document_loaders.base.BaseLoader`_ | + | _`langchain.schema.Document`_ | + | _`langchain.text_splitters.TextSplitter`_ | + | _`langchain.vectorstores.base.VectorStore`_ | + | _`langchain.embeddings.base.Embeddings`_ | + | _`langchain.schema.BaseRetriever`_ | + + + Unlike Langchain types, base Python types do not add a + [handle](../guidelines/components) to the field by default. To add handles, + use the _`input_types`_ key in the _`build_config`_ method. + + +- **build_config**: Used to define the configuration fields of the component (if applicable). It should always return a dictionary with specific keys representing the field names and corresponding configurations. This method is called when the code is processed (i.e., when you click _Check and Save_ in the code editor). It must follow the format described below: + + - Top-level keys are field names. + - Their values are also of type _`dict`_. They specify the behavior of the generated fields. 
+ + Below are the available keys used to configure component fields: + + | Key | Description | + | -------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | + | _`field_type: str`_ | The type of the field (can be any of the types supported by the _`build`_ method). | + | _`is_list: bool`_ | If the field can be a list of values, meaning that the user can manually add more inputs to the same field. | + | _`options: List[str]`_ | When defined, the field becomes a dropdown menu where a list of strings defines the options to be displayed. If the _`value`_ attribute is set to one of the options, that option becomes default. For this parameter to work, _`field_type`_ should invariably be _`str`_. | + | _`multiline: bool`_ | Defines if a string field opens a text editor. Useful for longer texts. | + | _`input_types: List[str]`_ | Used when you want a _`str`_ field to have connectable handles. | + | _`display_name: str`_ | Defines the name of the field. | + | _`advanced: bool`_ | Hide the field in the canvas view (displayed component settings only). Useful when a field is for advanced users. | + | _`password: bool`_ | To mask the input text. Useful to hide sensitive text (e.g. API keys). | + | _`required: bool`_ | Makes the field required. | + | _`info: str`_ | Adds a tooltip to the field. | + | _`file_types: List[str]`_ | This is a requirement if the _`field_type`_ is _file_. Defines which file types will be accepted. For example, _json_, _yaml_ or _yml_. 
| + +- The CustomComponent class also provides helpful methods for specific tasks (e.g., to load and use other flows from the Langflow platform): + + | Method Name | Description | + | -------------- | ------------------------------------------------------------------- | + | _`list_flows`_ | Returns a list of Flow objects with an _`id`_ and a _`name`_. | + | _`get_flow`_ | Returns a Flow object. Parameters are _`flow_name`_ or _`flow_id`_. | + | _`load_flow`_ | Loads a flow from a given _`id`_. | + +- Useful attributes: + + | Attribute Name | Description | + | -------------- | ----------------------------------------------------------------------------- | + | _`repr_value`_ | Displays the value it receives in the _`build`_ method. Useful for debugging. | + + + + Check out the [FlowRunner](../examples/flow-runner) example to understand how to call a flow from a custom component. + + diff --git a/docs/docs/components/prompts.mdx b/docs/docs/components/prompts.mdx index f4f2c4cae0..0c72572728 100644 --- a/docs/docs/components/prompts.mdx +++ b/docs/docs/components/prompts.mdx @@ -1,3 +1,5 @@ +import Admonition from "@theme/Admonition"; + # Prompts A prompt refers to the input given to a language model. It is constructed from multiple components and can be parametrized using prompt templates. A prompt template is a reproducible way to generate prompts and allow for easy customization through input variables. @@ -8,8 +10,10 @@ A prompt refers to the input given to a language model. It is constructed from m The `PromptTemplate` component allows users to create prompts and define variables that provide control over instructing the model. The template can take in a set of variables from the end user and generates the prompt once the conversation is initiated. -:::info -Once a variable is defined in the prompt template, it becomes a component input of its own. Check out [Prompt Customization](../guidelines/prompt-customization.mdx) to learn more. 
-::: + + Once a variable is defined in the prompt template, it becomes a component + input of its own. Check out [Prompt + Customization](../guidelines/prompt-customization.mdx) to learn more. + -- **template:** Template used to format an individual request. \ No newline at end of file +- **template:** Template used to format an individual request. diff --git a/docs/docs/contributing/how-contribute.md b/docs/docs/contributing/how-contribute.md index cdccc271f3..53b4304962 100644 --- a/docs/docs/contributing/how-contribute.md +++ b/docs/docs/contributing/how-contribute.md @@ -36,10 +36,9 @@ Before you start, make sure you have the following installed: - Poetry (>=1.4) - Node.js -Then install the dependencies and start the development server for the backend: +Then, in the root folder, install the dependencies and start the development server for the backend: ```bash -make install_backend make backend ``` @@ -49,6 +48,7 @@ And the frontend: make frontend ``` + --- ## Docker compose @@ -59,4 +59,19 @@ The following snippet will run the backend and frontend in separate containers. docker compose up --build # or make dev build=1 -``` \ No newline at end of file +``` + +--- + +## Documentation + +The documentation is built using [Docusaurus](https://docusaurus.io/). To run the documentation locally, run the following commands: + +```bash +cd docs +npm install +npm run start +``` + +The documentation will be available at `localhost:3000` and all the files are located in the `docs/docs` folder. +Once you are done with your changes, you can create a Pull Request to the `main` branch. diff --git a/docs/docs/examples/buffer-memory.mdx b/docs/docs/examples/buffer-memory.mdx index c3e886cf98..d34649991d 100644 --- a/docs/docs/examples/buffer-memory.mdx +++ b/docs/docs/examples/buffer-memory.mdx @@ -1,3 +1,5 @@ +import Admonition from "@theme/Admonition"; + # Buffer Memory For certain applications, retaining past interactions is crucial. 
For that, chains and agents may accept a memory component as one of their input parameters. The `ConversationBufferMemory` component is one of them. It stores messages and extracts them into variables. @@ -17,9 +19,10 @@ import ZoomableImage from "/src/theme/ZoomableImage.js"; #### Download Flow -:::note LangChain Components ๐Ÿฆœ๐Ÿ”— + - [`ConversationBufferMemory`](https://python.langchain.com/docs/modules/memory/how_to/buffer) - [`ConversationChain`](https://python.langchain.com/docs/modules/chains/) - [`ChatOpenAI`](https://python.langchain.com/docs/modules/model_io/models/chat/integrations/openai) - ::: + + diff --git a/docs/docs/examples/conversation-chain.mdx b/docs/docs/examples/conversation-chain.mdx index b8cbb11bb1..db31818811 100644 --- a/docs/docs/examples/conversation-chain.mdx +++ b/docs/docs/examples/conversation-chain.mdx @@ -1,10 +1,14 @@ +import Admonition from "@theme/Admonition"; + # Conversation Chain This example shows how to instantiate a simple `ConversationChain` component using a Language Model (LLM). Once the Node Status turns green ๐ŸŸข, the chat will be ready to take in user messages. Here, we used `ChatOpenAI` to act as the required LLM input, but you can use any LLM for this purpose. -:::info + + Make sure to always get the API key from the provider. 
-::: + + ## โ›“๏ธ Langflow Example @@ -21,8 +25,9 @@ import ZoomableImage from "/src/theme/ZoomableImage.js"; #### Download Flow -:::note LangChain Components ๐Ÿฆœ๐Ÿ”— + - [`ConversationChain`](https://python.langchain.com/docs/modules/chains/) - [`ChatOpenAI`](https://python.langchain.com/docs/modules/model_io/models/chat/integrations/openai) - ::: + + diff --git a/docs/docs/examples/csv-loader.mdx b/docs/docs/examples/csv-loader.mdx index de808ec3d7..c59dfc1e76 100644 --- a/docs/docs/examples/csv-loader.mdx +++ b/docs/docs/examples/csv-loader.mdx @@ -1,3 +1,5 @@ +import Admonition from "@theme/Admonition"; + # CSV Loader The `VectoStoreAgent` component retrieves information from one or more vector stores. This example shows a `VectoStoreAgent` connected to a CSV file through the `Chroma` vector store. Process description: @@ -7,13 +9,18 @@ The `VectoStoreAgent` component retrieves information from one or more vector st - These chunks feed the `Chroma` vector store, which converts them into vectors and stores them for fast indexing. - Finally, the agent accesses the information of the vector store through the `VectorStoreInfo` tool. -:::info -The vector store is used for efficient semantic search, while `VectorStoreInfo` carries information about it, such as its name and description. Embeddings are a way to represent words, phrases, or any entities in a vector space. Learn more about them [here](https://platform.openai.com/docs/guides/embeddings/what-are-embeddings). -::: + + The vector store is used for efficient semantic search, while + `VectorStoreInfo` carries information about it, such as its name and + description. Embeddings are a way to represent words, phrases, or any entities + in a vector space. Learn more about them + [here](https://platform.openai.com/docs/guides/embeddings/what-are-embeddings). + -:::tip -Once you build this flow, ask questions about the data in the chat interface (e.g., number of rows or columns). 
-::: + + Once you build this flow, ask questions about the data in the chat interface + (e.g., number of rows or columns). + ## โ›“๏ธ Langflow Example @@ -30,7 +37,7 @@ import ZoomableImage from "/src/theme/ZoomableImage.js"; #### Download Flow -:::note LangChain Components ๐Ÿฆœ๐Ÿ”— + - [`CSVLoader`](https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/csv) - [`CharacterTextSplitter`](https://python.langchain.com/docs/modules/data_connection/document_transformers/text_splitters/character_text_splitter) @@ -39,4 +46,5 @@ import ZoomableImage from "/src/theme/ZoomableImage.js"; - [`VectorStoreInfo`](https://python.langchain.com/docs/modules/data_connection/vectorstores/) - [`OpenAI`](https://python.langchain.com/docs/modules/model_io/models/llms/integrations/openai) - [`VectorStoreAgent`](https://python.langchain.com/docs/modules/agents/toolkits/vectorstore) - ::: + + diff --git a/docs/docs/examples/flow-runner.mdx b/docs/docs/examples/flow-runner.mdx new file mode 100644 index 0000000000..151c7d182a --- /dev/null +++ b/docs/docs/examples/flow-runner.mdx @@ -0,0 +1,365 @@ +--- +description: Custom Components +hide_table_of_contents: true +--- + +import ZoomableImage from "/src/theme/ZoomableImage.js"; +import Admonition from "@theme/Admonition"; + +# FlowRunner Component + +The CustomComponent class allows us to create components that interact with Langflow itself. In this example, we will make a component that runs other flows available in "My Collection". + + + +We will cover how to: + +- List Collection flows using the _`list_flows`_ method. +- Load a flow using the _`load_flow`_ method. +- Configure a dropdown input field using the _`options`_ parameter. + +
+ +Example Code + +```python +from langflow import CustomComponent +from langchain.schema import Document + +class FlowRunner(CustomComponent): +display_name = "Flow Runner" +description = "Run other flows using a document as input." + + def build_config(self): + flows = self.list_flows() + flow_names = [f.name for f in flows] + return {"flow_name": {"options": flow_names, + "display_name": "Flow Name", + }, + "document": {"display_name": "Document"} + } + + + def build(self, flow_name: str, document: Document) -> Document: + # List the flows + flows = self.list_flows() + # Get the flow that matches the selected name + # You can also get the flow by id + # using self.get_flow(flow_id=flow_id) + tweaks = {} + flow = self.get_flow(flow_name=flow_name, tweaks=tweaks) + # Get the page_content from the document + if document and isinstance(document, list): + document = document[0] + page_content = document.page_content + # Use it in the flow + result = flow(page_content) + return Document(page_content=str(result)) + +``` + +
+ + + +```python +from langflow import CustomComponent + + +class MyComponent(CustomComponent): + display_name = "Custom Component" + description = "This is a custom component" + + def build_config(self): + ... + + def build(self): + ... + +``` + +The typical structure of a Custom Component is composed of _`display_name`_ and _`description`_ attributes, _`build`_ and _`build_config`_ methods. + +--- + +```python +from langflow import CustomComponent + + +# focus +class FlowRunner(CustomComponent): + # focus + display_name = "Flow Runner" + # focus + description = "Run other flows" + + def build_config(self): + ... + + def build(self): + ... + +``` + +Let's start by defining our component's _`display_name`_ and _`description`_. + +--- + +```python +from langflow import CustomComponent +# focus +from langchain.schema import Document + + +class FlowRunner(CustomComponent): + display_name = "Flow Runner" + description = "Run other flows using a document as input." + + def build_config(self): + ... + + def build(self): + ... + +``` + +Second, we will import _`Document`_ from the [_langchain.schema_](https://docs.langchain.com/docs/components/schema/) module. This will be the return type of the _`build`_ method. + +--- + +```python +from langflow import CustomComponent +# focus +from langchain.schema import Document + + +class FlowRunner(CustomComponent): + display_name = "Flow Runner" + description = "Run other flows using a document as input." + + def build_config(self): + ... + + # focus + def build(self, flow_name: str, document: Document) -> Document: + ... + +``` + +Now, let's add the [parameters](focus://11[20:55]) and the [return type](focus://11[60:69]) to the _`build`_ method. The parameters added are: + +- _`flow_name`_ is the name of the flow we want to run. +- _`document`_ is the input document to be passed to that flow. 
+ - Since _`Document`_ is a Langchain type, it will add an input [handle](../guidelines/components) to the component ([see more](../components/custom)). + +--- + +```python focus=13:14 +from langflow import CustomComponent +from langchain.schema import Document + + +class FlowRunner(CustomComponent): + display_name = "Flow Runner" + description = "Run other flows using a document as input." + + def build_config(self): + ... + + def build(self, flow_name: str, document: Document) -> Document: + # List the flows + flows = self.list_flows() + +``` + +We can now start writing the _`build`_ method. Let's list available flows in "My Collection" using the _`list_flows`_ method. + +--- + +```python focus=15:18 +from langflow import CustomComponent +from langchain.schema import Document + + +class FlowRunner(CustomComponent): + display_name = "Flow Runner" + description = "Run other flows using a document as input." + + def build_config(self): + ... + + def build(self, flow_name: str, document: Document) -> Document: + # List the flows + flows = self.list_flows() + # Get the flow that matches the selected name + # You can also get the flow by id + # using self.get_flow(flow_id=flow_id) + tweaks = {} + flow = self.get_flow(flow_name=flow_name, tweaks=tweaks) + +``` + +And retrieve a flow that matches the selected name (we'll make a dropdown input field for the user to choose among flow names). + + + From version 0.4.0, names are unique, which was not the case in previous + versions. This might lead to unexpected results if using flows with the same + name. + + +--- + +```python +from langflow import CustomComponent +from langchain.schema import Document + + +class FlowRunner(CustomComponent): + display_name = "Flow Runner" + description = "Run other flows using a document as input." + + def build_config(self): + ... 
+ + def build(self, flow_name: str, document: Document) -> Document: + # List the flows + flows = self.list_flows() + # Get the flow that matches the selected name + # You can also get the flow by id + # using self.get_flow(flow_id=flow_id) + tweaks = {} + flow = self.get_flow(flow_name=flow_name, tweaks=tweaks) + + +``` + +You can load this flow using _`get_flow`_ and set a _`tweaks`_ dictionary to customize it. Find more about tweaks in our [features guidelines](../guidelines/features#code). + +--- + +```python +from langflow import CustomComponent +from langchain.schema import Document + + +class FlowRunner(CustomComponent): + display_name = "Flow Runner" + description = "Run other flows using a document as input." + + def build_config(self): + ... + + def build(self, flow_name: str, document: Document) -> Document: + # List the flows + flows = self.list_flows() + # Get the flow that matches the selected name + # You can also get the flow by id + # using self.get_flow(flow_id=flow_id) + tweaks = {} + flow = self.get_flow(flow_name=flow_name, tweaks=tweaks) + # Get the page_content from the document + if document and isinstance(document, list): + document = document[0] + page_content = document.page_content + # Use it in the flow + result = flow(page_content) + return Document(page_content=str(result)) +``` + +We are using a _`Document`_ as input because it is a straightforward way to pass text data in Langflow (specifically because you can connect it to many [loaders](../components/loaders)). +Generally, a flow will take a string or a dictionary as input because that's what LangChain components expect. +In case you are passing a dictionary, you need to build it according to the needs of the flow you are using. + +The content of a document can be extracted using the _`page_content`_ attribute, which is a string, and passed as an argument to the selected flow. 
+ +--- + +```python focus=9:16 +from langflow import CustomComponent +from langchain.schema import Document + + +class FlowRunner(CustomComponent): + display_name = "Flow Runner" + description = "Run other flows using a document as input." + + def build_config(self): + flows = self.list_flows() + flow_names = [f.name for f in flows] + return {"flow_name": {"options": flow_names, + "display_name": "Flow Name", + }, + "document": {"display_name": "Document"} + } + + def build(self, flow_name: str, document: Document) -> Document: + # List the flows + flows = self.list_flows() + # Get the flow that matches the selected name + # You can also get the flow by id + # using self.get_flow(flow_id=flow_id) + tweaks = {} + flow = self.get_flow(flow_name=flow_name, tweaks=tweaks) + # Get the page_content from the document + if document and isinstance(document, list): + document = document[0] + page_content = document.page_content + # Use it in the flow + result = flow(page_content) + return Document(page_content=str(result)) +``` + +Finally, we can add field customizations through the _`build_config`_ method. Here we added the _`options`_ key to make the _`flow_name`_ field a dropdown menu. Check out the [custom component reference](../components/custom) for a list of available keys. + + + Make sure that the field type is _`str`_ and _`options`_ values are strings. + + + + +Done! This is what our script and custom component looks like: + +
+ + + + + +
diff --git a/docs/docs/examples/how-upload-examples.mdx b/docs/docs/examples/how-upload-examples.mdx index 8a43062125..2b1a2b06c7 100644 --- a/docs/docs/examples/how-upload-examples.mdx +++ b/docs/docs/examples/how-upload-examples.mdx @@ -7,16 +7,14 @@ import ZoomableImage from "/src/theme/ZoomableImage.js"; We welcome all examples that can help our community learn and explore Langflow's capabilities. Langflow Examples is a repository on [GitHub](https://github.com/logspace-ai/langflow_examples) that contains examples of flows that people can use for inspiration and learning. -
- -
+{" "} + To upload examples, please follow these steps: diff --git a/docs/docs/examples/midjourney-prompt-chain.mdx b/docs/docs/examples/midjourney-prompt-chain.mdx index d3ca57c917..c79bb0b277 100644 --- a/docs/docs/examples/midjourney-prompt-chain.mdx +++ b/docs/docs/examples/midjourney-prompt-chain.mdx @@ -1,3 +1,5 @@ +import Admonition from "@theme/Admonition"; + # MidJourney Prompt Chain The `MidJourneyPromptChain` can be used to generate imaginative and detailed MidJourney prompts. @@ -14,9 +16,11 @@ And get a response such as: Imagine a mysterious forest, the trees are tall and ancient, their branches reaching up to the sky. Through the darkness, a dragon emerges from the shadows, its scales shimmering in the moonlight. Its wingspan is immense, and its eyes glow with a fierce intensity. It is a majestic and powerful creature, one that commands both respect and fear. ``` -:::tip -Notice that the `ConversationSummaryMemory` stores a summary of the conversation over time. Try using it to create better prompts as the conversation goes on. -::: + + Notice that the `ConversationSummaryMemory` stores a summary of the + conversation over time. Try using it to create better prompts as the + conversation goes on. 
+ ## โ›“๏ธ Langflow Example @@ -33,8 +37,9 @@ import ZoomableImage from "/src/theme/ZoomableImage.js"; #### Download Flow -:::note LangChain Components ๐Ÿฆœ๐Ÿ”— + - [`OpenAI`](https://python.langchain.com/docs/modules/model_io/models/llms/integrations/openai) - [`ConversationSummaryMemory`](https://python.langchain.com/docs/modules/memory/how_to/summary) - ::: + + diff --git a/docs/docs/examples/multiple-vectorstores.mdx b/docs/docs/examples/multiple-vectorstores.mdx index 36890c8668..0c9f11c4c8 100644 --- a/docs/docs/examples/multiple-vectorstores.mdx +++ b/docs/docs/examples/multiple-vectorstores.mdx @@ -1,26 +1,31 @@ +import Admonition from "@theme/Admonition"; + # Multiple Vector Stores The example below shows an agent operating with two vector stores built upon different data sources. The `TextLoader` loads a TXT file, while the `WebBaseLoader` pulls text from webpages into a document format to accessed downstream. The `Chroma` vector stores are created analogous to what we have demonstrated in our [CSV Loader](/examples/csv-loader.mdx) example. Finally, the `VectorStoreRouterAgent` constructs an agent that routes between the vector stores. -:::info -Get the TXT file used [here](https://github.com/hwchase17/chat-your-data/blob/master/state_of_the_union.txt). -::: + + Get the TXT file used + [here](https://github.com/hwchase17/chat-your-data/blob/master/state_of_the_union.txt). + URL used by the `WebBaseLoader`: -```txt +```text https://pt.wikipedia.org/wiki/Harry_Potter ``` -:::tip -When you build the flow, request information about one of the sources. The agent should be able to use the correct source to generate a response. -::: + + When you build the flow, request information about one of the sources. The + agent should be able to use the correct source to generate a response. 
+ -:::info -Learn more about Multiple Vector Stores [here](https://python.langchain.com/docs/modules/agents/toolkits/vectorstore?highlight=Multiple%20Vector%20Stores#multiple-vectorstores). -::: + + Learn more about Multiple Vector Stores + [here](https://python.langchain.com/docs/modules/agents/toolkits/vectorstore?highlight=Multiple%20Vector%20Stores#multiple-vectorstores). + ## โ›“๏ธ Langflow Example @@ -37,7 +42,7 @@ import ZoomableImage from "/src/theme/ZoomableImage.js"; #### Download Flow -:::note LangChain Components ๐Ÿฆœ๐Ÿ”— + - [`WebBaseLoader`](https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/web_base) - [`TextLoader`](https://python.langchain.com/docs/modules/data_connection/document_loaders/integrations/unstructured_file) @@ -49,4 +54,4 @@ import ZoomableImage from "/src/theme/ZoomableImage.js"; - [`VectorStoreRouterToolkit`](https://python.langchain.com/docs/modules/agents/toolkits/vectorstore) - [`VectorStoreRouterAgent`](https://python.langchain.com/docs/modules/agents/toolkits/vectorstore) -::: + diff --git a/docs/docs/examples/python-function.mdx b/docs/docs/examples/python-function.mdx index 12a262a3ff..f537075c6b 100644 --- a/docs/docs/examples/python-function.mdx +++ b/docs/docs/examples/python-function.mdx @@ -1,3 +1,5 @@ +import Admonition from "@theme/Admonition"; + # Python Function Langflow allows you to create a customized tool using the `PythonFunction` connected to a `Tool` component. In this example, Regex is used in Python to validate a pattern. @@ -15,15 +17,19 @@ def is_brazilian_zipcode(zipcode: str) -> bool: return False ``` -:::tip -When a tool is called, it is often desirable to have its output returned directly to the user. You can do this by setting the **return_direct** flag for a tool to be True. -::: + + When a tool is called, it is often desirable to have its output returned + directly to the user. You can do this by setting the **return_direct** flag + for a tool to be True. 
+ The `AgentInitializer` component is a quick way to construct an agent from the model and tools. -:::info -The `PythonFunction` is a custom component that uses the LangChain ๐Ÿฆœ๐Ÿ”— tool decorator. Learn more about it [here](https://python.langchain.com/docs/modules/agents/tools/how_to/custom_tools). -::: + + The `PythonFunction` is a custom component that uses the LangChain ๐Ÿฆœ๐Ÿ”— tool + decorator. Learn more about it + [here](https://python.langchain.com/docs/modules/agents/tools/how_to/custom_tools). + ## โ›“๏ธ Langflow Example @@ -40,9 +46,10 @@ import ZoomableImage from "/src/theme/ZoomableImage.js"; #### Download Flow -:::note LangChain Components ๐Ÿฆœ๐Ÿ”— + - [`PythonFunctionTool`](https://python.langchain.com/docs/modules/agents/tools/how_to/custom_tools) - [`ChatOpenAI`](https://python.langchain.com/docs/modules/model_io/models/chat/integrations/openai) - [`AgentInitializer`](https://python.langchain.com/docs/modules/agents/) - ::: + + diff --git a/docs/docs/examples/serp-api-tool.mdx b/docs/docs/examples/serp-api-tool.mdx index a7e1d3d8e2..60e55791a9 100644 --- a/docs/docs/examples/serp-api-tool.mdx +++ b/docs/docs/examples/serp-api-tool.mdx @@ -1,24 +1,29 @@ +import Admonition from "@theme/Admonition"; + # Serp API Tool The [Serp API](https://serpapi.com/) (Search Engine Results Page) allows developers to scrape results from search engines such as Google, Bing and Yahoo, and can be used as in Langflow through the `Search` component. -:::info -To use the Serp API, you first need to sign up [Serp API](https://serpapi.com/) for an API key on the provider's website. -::: + + To use the Serp API, you first need to sign up [Serp + API](https://serpapi.com/) for an API key on the provider's website. + Here, the `ZeroShotPrompt` component specifies a prompt template for the `ZeroShotAgent`. Set a _Prefix_ and _Suffix_ with rules for the agent to obey. In the example, we used default templates. 
The `LLMChain` is a simple chain that takes in a prompt template, formats it with the user input, and returns the response from an LLM. -:::tip -In this example, we used [`ChatOpenAI`](https://platform.openai.com/) as the LLM, but feel free to experiment with other Language Models! -::: + + In this example, we used [`ChatOpenAI`](https://platform.openai.com/) as the + LLM, but feel free to experiment with other Language Models! + The `ZeroShotAgent` takes the `LLMChain` and the `Search` tool as inputs, using the tool to find information when necessary. -:::info -Learn more about the Serp API [here](https://python.langchain.com/docs/modules/agents/tools/integrations/serpapi). -::: + + Learn more about the Serp API + [here](https://python.langchain.com/docs/modules/agents/tools/integrations/serpapi). + ## โ›“๏ธ Langflow Example @@ -35,11 +40,12 @@ import ZoomableImage from "/src/theme/ZoomableImage.js"; #### Download Flow -:::note LangChain Components ๐Ÿฆœ๐Ÿ”— + - [`ZeroShotPrompt`](https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/) - [`OpenAI`](https://python.langchain.com/docs/modules/model_io/models/llms/integrations/openai) - [`LLMChain`](https://python.langchain.com/docs/modules/chains/foundational/llm_chain) - [`Search`](https://python.langchain.com/docs/modules/agents/tools/integrations/serpapi) - [`ZeroShotAgent`](https://python.langchain.com/docs/modules/agents/how_to/custom_mrkl_agent) - ::: + + diff --git a/docs/docs/getting-started/hugging-face-spaces.mdx b/docs/docs/getting-started/hugging-face-spaces.mdx index e8b3852a9d..acc4bb8d52 100644 --- a/docs/docs/getting-started/hugging-face-spaces.mdx +++ b/docs/docs/getting-started/hugging-face-spaces.mdx @@ -6,15 +6,14 @@ import ThemedImage from "@theme/ThemedImage"; import useBaseUrl from "@docusaurus/useBaseUrl"; import ZoomableImage from "/src/theme/ZoomableImage.js"; -
- -
+{" "} + + Check out Langflow on [HuggingFace Spaces](https://huggingface.co/spaces/Logspace/Langflow). diff --git a/docs/docs/guidelines/chat-interface.mdx b/docs/docs/guidelines/chat-interface.mdx index c09f000762..0ac23dc8aa 100644 --- a/docs/docs/guidelines/chat-interface.mdx +++ b/docs/docs/guidelines/chat-interface.mdx @@ -7,58 +7,46 @@ import ReactPlayer from "react-player"; Langflowโ€™s chat interface provides a user-friendly experience and functionality to interact with the model and customize the prompt. The sidebar brings options that allow users to view and edit pre-defined prompt variables. This feature facilitates quick experimentation by enabling the modification of variable values right in the chat. -
- -
+{" "} + Notice that editing variables in the chat interface take place temporarily and wonโ€™t change their original value in the components once the chat is closed. -
- -
+{" "} -To view the complete prompt in its original, structured format, click the "Display Prompt" option. This feature lets you see the prompt exactly as it entered the model. + -
- -
+To view the complete prompt in its original, structured format, click the "Display Prompt" option. This feature lets you see the prompt exactly as it entered the model. +{" "} + In the chat interface, you can redefine which variable should be interpreted as the chat input. This gives you control over these inputs and allows dynamic and creative interactions. -
- -
+{" "} + diff --git a/docs/docs/guidelines/chat-widget.mdx b/docs/docs/guidelines/chat-widget.mdx new file mode 100644 index 0000000000..7f6737fea8 --- /dev/null +++ b/docs/docs/guidelines/chat-widget.mdx @@ -0,0 +1,209 @@ +import ThemedImage from "@theme/ThemedImage"; +import useBaseUrl from "@docusaurus/useBaseUrl"; +import ZoomableImage from "/src/theme/ZoomableImage.js"; +import ReactPlayer from "react-player"; +import Admonition from "@theme/Admonition"; + +# Chat Widget + +
+ The Langflow Chat Widget is a powerful web component that enables + communication with a Langflow project. This widget allows for a chat interface + embedding, allowing the integration of Langflow into web applications + effortlessly. +
+ +## Features + +๐ŸŒŸ **Seamless Integration:** Easily integrate the Langflow Chat Widget into your website or web application with just a few lines of JavaScript. + +๐Ÿš€ **Interactive Chat Interface:** Engage your users with a user-friendly conversation, powered by Langflow's advanced language understanding capabilities. + +๐ŸŽ›๏ธ **Customizable Styling:** Customize the appearance of the chat widget to match your application's design and branding. + +๐ŸŒ **Multilingual Support:** Communicate with users in multiple languages, opening up your application to a global audience. + +--- + +## Usage + +
+ You can get the HTML code to embed the chat by clicking the Code button
+ in the sidebar after building a flow.
+
+ +{" "} + + + +
+ By clicking the Chat Widget HTML tab, you'll get the code to insert. Read
+ below to learn how to use it with HTML, React, and Angular.
+
+ +{" "} + + + +--- + +### HTML + +The Chat Widget can be embedded into any HTML page, inside a _``_ tag, as demonstrated in the video below. + +
+ +
+ +--- + +### React + +To embed the Chat Widget using React, you'll need to insert this _` +``` + +Then, declare your Web Component and encapsulate it in a React component. + +```jsx +declare global { + namespace JSX { + interface IntrinsicElements { + "langflow-chat": any; + } + } +} + +export default function ChatWidget({ className }) { + return ( +
+ +
+ ); +} +``` + +Finally, you can place the component anywhere in your code to display the Chat Widget. + +--- + +### Angular + +To use it in Angular, first add this _` +``` + +When you use a custom web component in an Angular template, the Angular compiler might show a warning when it doesn't recognize the custom elements by default. To suppress this warning, add _`CUSTOM_ELEMENTS_SCHEMA`_ to the module's _`@NgModule.schemas`_. + +- Open the module file (it typically ends with _.module.ts_) where you'd add the _`langflow-chat`_ web component. +- Import _`CUSTOM_ELEMENTS_SCHEMA`_ at the top of the file: + +```ts +import { NgModule, CUSTOM_ELEMENTS_SCHEMA } from "@angular/core"; +``` + +- Add _`CUSTOM_ELEMENTS_SCHEMA`_ to the 'schemas' array inside the '@NgModule' decorator: + +```ts +@NgModule({ + declarations: [ + // ... Other components and directives ... + ], + imports: [ + // ... Other imported modules ... + ], + schemas: [CUSTOM_ELEMENTS_SCHEMA], // Add the CUSTOM_ELEMENTS_SCHEMA here +}) +export class YourModule {} +``` + +In your Angular project, find the component belonging to the module where _`CUSTOM_ELEMENTS_SCHEMA`_ was added. + +- Inside the template, add the _`langflow-chat`_ tag to include the Chat Widget in your component's view: + +```jsx + +``` + + +
    +
  • + _`CUSTOM_ELEMENTS_SCHEMA`_ is a built-in schema that allows Angular to + recognize custom elements. +
  • +
  • + Adding _`CUSTOM_ELEMENTS_SCHEMA`_ tells Angular to allow custom elements + in your templates, and it will suppress the warning related to unknown + elements like _`langflow-chat`_. +
  • +
  • + Notice that you can only use the Chat Widget in components that are part + of the module where you added _`CUSTOM_ELEMENTS_SCHEMA`_. +
  • +
+
+ +--- + +## Configuration + +Use the widget API to customize your Chat Widget: + + + Props with the type JSON need to be passed as Stringified JSONs, with the + format {"key":"value"}. + + +| Prop | Type | Required | Description | +| --------------------- | ------- | -------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| bot_message_style | JSON | No | Applies custom formatting to bot messages. | +| chat_input_field | String | Yes | Defines the type of the input field for chat messages. | +| chat_inputs | JSON | Yes | Determines the chat input elements and their respective values. | +| chat_output_key | String | No | Specifies which output to display if multiple outputs are available. | +| chat_position | String | No | Positions the chat window on the screen (options include: top-left, top-center, top-right, center-left, center-right, bottom-right, bottom-center, bottom-left). | +| chat_trigger_style | JSON | No | Styles the chat trigger button. | +| chat_window_style | JSON | No | Customizes the overall appearance of the chat window. | +| error_message_style | JSON | No | Sets the format for error messages within the chat window. | +| flow_id | String | Yes | Identifies the flow that the component is associated with. | +| height | Number | No | Sets the height of the chat window in pixels. | +| host_url | String | Yes | Specifies the URL of the host for chat component communication. | +| input_container_style | JSON | No | Applies styling to the container where chat messages are entered. | +| input_style | JSON | No | Sets the style for the chat input field. | +| online | Boolean | No | Toggles the online status of the chat component. | +| online_message | String | No | Sets a custom message to display when the chat component is online. | +| placeholder | String | No | Sets the placeholder text for the chat input field. 
| +| placeholder_sending | String | No | Sets the placeholder text to display while a message is being sent. | +| send_button_style | JSON | No | Sets the style for the send button in the chat window. | +| send_icon_style | JSON | No | Sets the style for the send icon in the chat window. | +| tweaks | JSON | No | Applies additional custom adjustments for the associated flow. | +| user_message_style | JSON | No | Determines the formatting for user messages in the chat window. | +| width | Number | No | Sets the width of the chat window in pixels. | +| window_title | String | No | Sets the title displayed in the chat window's header or title bar. | diff --git a/docs/docs/guidelines/components.mdx b/docs/docs/guidelines/components.mdx index ba2f5ff337..b7dadcfce7 100644 --- a/docs/docs/guidelines/components.mdx +++ b/docs/docs/guidelines/components.mdx @@ -25,17 +25,14 @@ Components are the building blocks of the flows. They are made of inputs, output of that type is required. -
- -
+{" "} +
On the top right corner, you will find the component status icon ๐Ÿ”ด. Make the diff --git a/docs/docs/guidelines/custom-component.mdx b/docs/docs/guidelines/custom-component.mdx new file mode 100644 index 0000000000..bcd6372221 --- /dev/null +++ b/docs/docs/guidelines/custom-component.mdx @@ -0,0 +1,407 @@ +--- +description: Custom Components +hide_table_of_contents: true +--- + +import ZoomableImage from "/src/theme/ZoomableImage.js"; +import Admonition from "@theme/Admonition"; + +# Custom Components + +In Langflow, a Custom Component is a special component type that allows users to extend the platform's functionality by creating their own reusable and configurable components. + +A Custom Component is created from a user-defined Python script that uses the _`CustomComponent`_ class provided by the Langflow library. These components can be as simple as a basic function that takes and returns a string or as complex as a combination of multiple sub-components and API calls. + +Let's take a look at the basic rules and features. Then we'll go over an example. + +## TL;DR + +- Create a class that inherits from _`CustomComponent`_ and contains a _`build`_ method. +- Use arguments with [Type Annotations (or Type Hints)](https://docs.python.org/3/library/typing.html) of the _`build`_ method to create component fields. +- Use the _`build_config`_ method to customize how these fields look and behave. +- Set up a folder with your components to load them up in Langflow's sidebar. + +Here is an example: + +
+ + +```python +from langflow import CustomComponent +from langchain.schema import Document + +class DocumentProcessor(CustomComponent): + display_name = "Document Processor" + description = "This component processes a document" + + def build_config(self) -> dict: + options = ["Uppercase", "Lowercase", "Titlecase"] + return { + "function": {"options": options, + "value": options[0]}} + + def build(self, document: Document, function: str) -> Document: + if isinstance(document, list): + document = document[0] + page_content = document.page_content + if function == "Uppercase": + page_content = page_content.upper() + elif function == "Lowercase": + page_content = page_content.lower() + elif function == "Titlecase": + page_content = page_content.title() + self.repr_value = f"Result of {function} function: {page_content}" + return Document(page_content=page_content) +``` + + + + + +
+ + + Check out [FlowRunner Component](../examples/flow-runner) for a more complex + example. + + +--- + +## Rules + +The Python script for every Custom Component should follow a set of rules. Let's go over them one by one: + + + +### Rule 1 + +The script must contain a **single class** that inherits from _`CustomComponent`_. + +```python +from langflow import CustomComponent +from langchain.schema import Document + +class MyComponent(CustomComponent): + display_name = "Custom Component" + description = "This is a custom component" + + def build_config(self) -> dict: + ... + + def build(self, document: Document, function: str) -> Document: + ... +``` + +--- + +### Rule 2 + +This class requires a _`build`_ method used to run the component and define its fields. + +```python +from langflow import CustomComponent +from langchain.schema import Document + +class MyComponent(CustomComponent): + display_name = "Custom Component" + description = "This is a custom component" + + def build_config(self) -> dict: + ... + + # focus + # mark + def build(self) -> Document: + ... +``` + +--- + +The [Return Type Annotation](https://docs.python.org/3/library/typing.html) of the _`build`_ method defines the component type (e.g., Chain, BaseLLM, or basic Python types). Check out all supported types in the [component reference](../components/custom). + +```python +from langflow import CustomComponent +from langchain.schema import Document + +class MyComponent(CustomComponent): + display_name = "Custom Component" + description = "This is a custom component" + + def build_config(self) -> dict: + ... + + # focus[20:31] + # mark + def build(self) -> Document: + ... +``` + +--- + +```python +from langflow import CustomComponent +from langchain.schema import Document + +class MyComponent(CustomComponent): + display_name = "Custom Component" + description = "This is a custom component" + + def build_config(self) -> dict: + ... + + def build(self) -> Document: + ... 
+``` + +### Rule 3 + +The class can have a [_`build_config`_](focus://8) method, which defines configuration fields for the component. The [_`build_config`_](focus://8) method should always return a dictionary with specific keys representing the field names and their corresponding configurations. It must follow the format described below: + +- Top-level keys are field names. +- Their values are also of type _`dict`_. They specify the behavior of the generated fields. + +Check out the [component reference](../components/custom) for more details on the available field configurations. + +--- + +```python +from langflow import CustomComponent +from langchain.schema import Document + +class MyComponent(CustomComponent): + display_name = "Custom Component" + description = "This is a custom component" + + def build_config(self) -> dict: + ... + + def build(self) -> Document: + ... +``` + +## Example + +Let's create a custom component that processes a document (_`langchain.schema.Document`_) using a simple function. + +--- + +### Pick a display name + +To start, let's choose a name for our component by adding a _`display_name`_ attribute. This name will appear on the canvas. The name of the class is not relevant, but let's call it _`DocumentProcessor`_. + +```python +from langflow import CustomComponent +from langchain.schema import Document + +# focus +class DocumentProcessor(CustomComponent): + # focus + display_name = "Document Processor" + description = "This is a custom component" + + def build_config(self) -> dict: + ... + + def build(self) -> Document: + ... +``` + +--- + +### Write a description + +We can also write a description for it using a _`description`_ attribute. + +```python +from langflow import CustomComponent +from langchain.schema import Document + +class DocumentProcessor(CustomComponent): + display_name = "Document Processor" + description = "This component processes a document" + + def build_config(self) -> dict: + ... 
+ + def build(self) -> Document: + ... +``` + +--- + +```python +from langflow import CustomComponent +from langchain.schema import Document + +class DocumentProcessor(CustomComponent): + display_name = "Document Processor" + description = "This component processes a document" + + def build_config(self) -> dict: + ... + + def build(self, document: Document, function: str) -> Document: + if isinstance(document, list): + document = document[0] + page_content = document.page_content + if function == "Uppercase": + page_content = page_content.upper() + elif function == "Lowercase": + page_content = page_content.lower() + elif function == "Titlecase": + page_content = page_content.title() + self.repr_value = f"Result of {function} function: {page_content}" + return Document(page_content=page_content) +``` + +### Add the build method + +Here, the build method takes two input parameters: _`document`_, representing the input document to be processed, and _`function`_, a string representing the selected text transformation to be applied (either "Uppercase," "Lowercase," or "Titlecase"). The method processes the text content of the input Document based on the selected function. + +The attribute _`repr_value`_ is used to display the result of the component on the canvas. It is optional and can be used to display any string value. + +The return type is _`Document`_. + +--- + +### Customize the component fields + +The _`build_config`_ method is here defined to customize the component fields. + +- _`options`_ determines that the field will be a dropdown menu. The list values and field type must be _`str`_. +- _`value`_ is the default option of the dropdown menu. +- _`display_name`_ is the name of the field to be displayed. 
+ +```python +from langflow import CustomComponent +from langchain.schema import Document + +class DocumentProcessor(CustomComponent): + display_name = "Document Processor" + description = "This component processes a document" + + def build_config(self) -> dict: + options = ["Uppercase", "Lowercase", "Titlecase"] + return { + "function": {"options": options, + "value": options[0], + "display_name": "Function" + }, + "document": {"display_name": "Document"} + } + + def build(self, document: Document, function: str) -> Document: + if isinstance(document, list): + document = document[0] + page_content = document.page_content + if function == "Uppercase": + page_content = page_content.upper() + elif function == "Lowercase": + page_content = page_content.lower() + elif function == "Titlecase": + page_content = page_content.title() + self.repr_value = f"Result of {function} function: {page_content}" + return Document(page_content=page_content) +``` + + + +All done! This is what our script and brand-new custom component look like: + +
+ + + + + +
+ +--- + +## Loading Custom Components + +For advanced customization, Langflow offers the option to create and load custom components outside of the standard interface. This process involves creating the desired components using a text editor and loading them using the Langflow CLI. + +### Folder Structure + +Create a folder that follows the same structural conventions as the [config.yaml](https://github.com/logspace-ai/langflow/blob/dev/src/backend/langflow/config.yaml) file. Inside this main directory, use a `custom_components` subdirectory for your custom components. + +Inside `custom_components`, you can create a Python file for each component. Similarly, any custom agents should be housed in an `agents` subdirectory. + +If you use a subdirectory name that is not in our config.yaml file, your component will appear in an `Other` category in the sidebar. + +Your structure should look something like this: + +``` +. +โ””โ”€โ”€ custom_components + โ”œโ”€โ”€ document_processor.py + โ””โ”€โ”€ ... +โ””โ”€โ”€ agents + โ””โ”€โ”€ ... +โ””โ”€โ”€ my_agents <-- Other category + โ””โ”€โ”€ ... +``` + +### Loading Custom Components + +You can specify the path to your custom components using the _`--components-path`_ argument when running the Langflow CLI, as shown below: + +```bash +langflow --components-path /path/to/components +``` + +Alternatively, you can set the `LANGFLOW_COMPONENTS_PATH` environment variable: + +```bash +export LANGFLOW_COMPONENTS_PATH=/path/to/components +langflow +``` + +Langflow will attempt to load all of the components found in the specified directory. If a component fails to load due to errors in the component's code, Langflow will print an error message to the console but will continue loading the rest of the components. + +### Interacting with Custom Components + +Once your custom components have been loaded successfully, they will appear in Langflow's sidebar. From there, you can add them to your Langflow canvas for use. 
However, please note that components with errors will not be available for addition to the canvas. Always ensure your code is error-free before attempting to load components. + +Remember, creating custom components allows you to extend the functionality of Langflow to better suit your unique needs. Happy coding! diff --git a/docs/docs/guidelines/features.mdx b/docs/docs/guidelines/features.mdx index cf8b09c6e7..6235b68dbf 100644 --- a/docs/docs/guidelines/features.mdx +++ b/docs/docs/guidelines/features.mdx @@ -2,6 +2,7 @@ import ThemedImage from "@theme/ThemedImage"; import useBaseUrl from "@docusaurus/useBaseUrl"; import ZoomableImage from "/src/theme/ZoomableImage.js"; import ReactPlayer from "react-player"; +import Admonition from "@theme/Admonition"; # Features @@ -12,17 +13,14 @@ import ReactPlayer from "react-player"; below:
-
- -
+{" "} +
Further down, we will explain each of these options. @@ -34,9 +32,10 @@ import ReactPlayer from "react-player"; Flows can be exported and imported as JSON files. -:::caution + Watch out for API keys being stored in local files. -::: + + --- diff --git a/docs/docs/guidelines/prompt-customization.mdx b/docs/docs/guidelines/prompt-customization.mdx index 8e2f409f92..efb5b3928a 100644 --- a/docs/docs/guidelines/prompt-customization.mdx +++ b/docs/docs/guidelines/prompt-customization.mdx @@ -7,80 +7,62 @@ import ReactPlayer from "react-player"; The prompt template allows users to create prompts and define variables that provide control over instructing the model. -
- -
+{" "} + Variables can be used to define instructions, questions, context, inputs, or examples for the model and can be created with any chosen name in curly brackets, e.g., `{variable_name}`. They act as placeholders for parts of the text that can be easily modified. -
- -
+{" "} + Once inserted, these variables are immediately recognized as new fields in the prompt component. Here, you can define their values within the component itself or leave a field empty to be adjusted over the chat interface. -
- -
+{" "} -You can also use documents or output parsers as prompt variables. By plugging them into prompt handles, theyโ€™ll disable and feed that input field. + -
- -
+You can also use documents or output parsers as prompt variables. By plugging them into prompt handles, theyโ€™ll disable and feed that input field. +{" "} + With this, users can interact with documents, webpages, or any other type of content directly from the prompt, which allows for seamless integration of external resources with the language model. - - If working with an interactive (chat-like) flow, remember to keep one of the input variables empty to behave as the chat input. -
- -
- +{" "} + diff --git a/docs/docs/guides/chatprompttemplate_guide.mdx b/docs/docs/guides/chatprompttemplate_guide.mdx index 422bb6420a..05a8f3333b 100644 --- a/docs/docs/guides/chatprompttemplate_guide.mdx +++ b/docs/docs/guides/chatprompttemplate_guide.mdx @@ -39,8 +39,7 @@ In this guide, we will modify the "Basic Chat with Prompt and History" example, 5. Open the "Prompt" field on the SystemMessagePromptTemplate component. -6. Enter the text: `You are a {role} that {behavior}.` - +6. Enter the text: _`You are a {role} that {behavior}.`_ 7. Save your changes by clicking on "Check & Save". 8. Define the 'role' variable by typing "obedient assistant". diff --git a/docs/docs/index.mdx b/docs/docs/index.mdx index 4ec4a300dc..7be04549c7 100644 --- a/docs/docs/index.mdx +++ b/docs/docs/index.mdx @@ -6,13 +6,11 @@ import ThemedImage from "@theme/ThemedImage"; import useBaseUrl from "@docusaurus/useBaseUrl"; import ZoomableImage from "/src/theme/ZoomableImage.js"; -
- -
+{" "} + diff --git a/docs/docusaurus.config.js b/docs/docusaurus.config.js index 3ac152b5b6..617aec3d0c 100644 --- a/docs/docusaurus.config.js +++ b/docs/docusaurus.config.js @@ -1,127 +1,145 @@ const lightCodeTheme = require("prism-react-renderer/themes/github"); +const { remarkCodeHike } = require("@code-hike/mdx"); // With JSDoc @type annotations, IDEs can provide config autocompletion /** @type {import('@docusaurus/types').DocusaurusConfig} */ -( - module.exports = { - title: "Langflow Documentation", - tagline: "Langflow is a GUI for LangChain, designed with react-flow", - favicon: "img/favicon.ico", - url: "https://logspace-ai.github.io", - baseUrl: "/", - onBrokenLinks: "throw", - onBrokenMarkdownLinks: "warn", - organizationName: "logspace-ai", - projectName: "langflow", - trailingSlash: false, - customFields: { - mendableAnonKey: process.env.MENDABLE_ANON_KEY, - }, - i18n: { - defaultLocale: "en", - locales: ["en"], - }, - presets: [ - [ - "@docusaurus/preset-classic", - /** @type {import('@docusaurus/preset-classic').Options} */ - ({ - docs: { - routeBasePath: "/", - sidebarPath: require.resolve("./sidebars.js"), - path: "docs", - // sidebarPath: 'sidebars.js', - }, - theme: { - customCss: require.resolve("./src/css/custom.css"), - }, - }), - ], - ], - plugins: [ - ["docusaurus-node-polyfills", { excludeAliases: ["console"] }], - "docusaurus-plugin-image-zoom", - // .... - async function myPlugin(context, options) { - return { - name: "docusaurus-tailwindcss", - configurePostCss(postcssOptions) { - // Appends TailwindCSS and AutoPrefixer. 
- postcssOptions.plugins.push(require("tailwindcss")); - postcssOptions.plugins.push(require("autoprefixer")); - return postcssOptions; - }, - }; - }, - ], - themeConfig: - /** @type {import('@docusaurus/preset-classic').ThemeConfig} */ +module.exports = { + title: "Langflow Documentation", + tagline: "Langflow is a GUI for LangChain, designed with react-flow", + favicon: "img/favicon.ico", + url: "https://logspace-ai.github.io", + baseUrl: "/", + onBrokenLinks: "throw", + onBrokenMarkdownLinks: "warn", + organizationName: "logspace-ai", + projectName: "langflow", + trailingSlash: false, + customFields: { + mendableAnonKey: process.env.MENDABLE_ANON_KEY, + }, + i18n: { + defaultLocale: "en", + locales: ["en"], + }, + presets: [ + [ + "@docusaurus/preset-classic", + /** @type {import('@docusaurus/preset-classic').Options} */ ({ - navbar: { - hideOnScroll: true, - title: "Langflow", - logo: { - alt: "Langflow", - src: "img/chain.png", - }, - items: [ - // right - { - position: "right", - href: "https://github.com/logspace-ai/langflow", - position: "right", - className: "header-github-link", - target: "_blank", - rel: null, - }, - { - position: "right", - href: "https://twitter.com/logspace_ai", - position: "right", - className: "header-twitter-link", - target: "_blank", - rel: null, - }, - { - position: "right", - href: "https://discord.gg/EqksyE2EX9", - position: "right", - className: "header-discord-link", - target: "_blank", - rel: null, - }, + docs: { + beforeDefaultRemarkPlugins: [ + [ + remarkCodeHike, + { + theme: "github-light", + showCopyButton: true, + lineNumbers: true, + }, + ], ], + routeBasePath: "/", + sidebarPath: require.resolve("./sidebars.js"), + path: "docs", + // sidebarPath: 'sidebars.js', }, - tableOfContents: { - minHeadingLevel: 2, - maxHeadingLevel: 5, - }, - colorMode: { - defaultMode: "light", - disableSwitch: true, - respectPrefersColorScheme: false, + theme: { + customCss: [ + require.resolve("@code-hike/mdx/styles.css"), + 
require.resolve("./src/css/custom.css"), + ], }, - announcementBar: { - content: - 'โญ๏ธ If you like โ›“๏ธLangflow, star it on GitHub! โญ๏ธ', - backgroundColor: "#B53D38", //Mustard Yellow #D19900 #D4B20B - Salmon #E9967A - textColor: "#fff", - isCloseable: false, + }), + ], + ], + plugins: [ + ["docusaurus-node-polyfills", { excludeAliases: ["console"] }], + "docusaurus-plugin-image-zoom", + // .... + async function myPlugin(context, options) { + return { + name: "docusaurus-tailwindcss", + configurePostCss(postcssOptions) { + // Appends TailwindCSS and AutoPrefixer. + postcssOptions.plugins.push(require("tailwindcss")); + postcssOptions.plugins.push(require("autoprefixer")); + return postcssOptions; }, - footer: { - links: [], - copyright: `Copyright ยฉ ${new Date().getFullYear()} Logspace.`, + }; + }, + ], + themes: ["mdx-v2"], + themeConfig: + /** @type {import('@docusaurus/preset-classic').ThemeConfig} */ + ({ + navbar: { + hideOnScroll: true, + title: "Langflow", + logo: { + alt: "Langflow", + src: "img/chain.png", }, - zoom: { - selector: ".markdown :not(a) > img:not(.no-zoom)", - background: { - light: "rgba(240, 240, 240, 0.9)", + items: [ + // right + { + position: "right", + href: "https://github.com/logspace-ai/langflow", + position: "right", + className: "header-github-link", + target: "_blank", + rel: null, + }, + { + position: "right", + href: "https://twitter.com/logspace_ai", + position: "right", + className: "header-twitter-link", + target: "_blank", + rel: null, }, - config: {}, + { + position: "right", + href: "https://discord.gg/EqksyE2EX9", + position: "right", + className: "header-discord-link", + target: "_blank", + rel: null, + }, + ], + }, + tableOfContents: { + minHeadingLevel: 2, + maxHeadingLevel: 5, + }, + colorMode: { + defaultMode: "light", + disableSwitch: true, + respectPrefersColorScheme: false, + }, + announcementBar: { + content: + 'โญ๏ธ If you like โ›“๏ธLangflow, star it on GitHub! 
โญ๏ธ', + backgroundColor: "#E8EBF1", //Mustard Yellow #D19900 #D4B20B - Salmon #E9967A + textColor: "#1C1E21", + isCloseable: false, + }, + footer: { + links: [], + copyright: `Copyright ยฉ ${new Date().getFullYear()} Logspace.`, + }, + zoom: { + selector: ".markdown :not(a) > img:not(.no-zoom)", + background: { + light: "rgba(240, 240, 240, 0.9)", }, - prism: { - theme: lightCodeTheme, + config: {}, + }, + // prism: { + // theme: require("prism-react-renderer/themes/dracula"), + // }, + docs: { + sidebar: { + hideable: true, }, - }), - } -); + }, + }), +}; diff --git a/docs/package-lock.json b/docs/package-lock.json index 7db7f93767..ed79230c62 100644 --- a/docs/package-lock.json +++ b/docs/package-lock.json @@ -9,12 +9,13 @@ "version": "0.0.0", "dependencies": { "@babel/preset-react": "^7.22.3", + "@code-hike/mdx": "^0.9.0", "@docusaurus/core": "2.4.1", "@docusaurus/plugin-ideal-image": "^2.4.1", "@docusaurus/preset-classic": "2.4.1", "@docusaurus/theme-classic": "^2.4.1", "@docusaurus/theme-search-algolia": "^2.4.1", - "@mdx-js/react": "^1.6.22", + "@mdx-js/react": "^2.3.0", "@mendable/search": "^0.0.114", "@pbe/react-yandex-maps": "^1.2.4", "@prismicio/client": "^7.0.1", @@ -22,6 +23,7 @@ "autoprefixer": "^10.4.14", "clsx": "^1.2.1", "docusaurus-plugin-image-zoom": "^0.1.4", + "docusaurus-theme-mdx-v2": "^0.1.2", "jquery": "^3.7.0", "medium-zoom": "^1.0.8", "node-fetch": "^3.3.1", @@ -1986,6 +1988,49 @@ "node": ">=6.9.0" } }, + "node_modules/@code-hike/lighter": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/@code-hike/lighter/-/lighter-0.7.0.tgz", + "integrity": "sha512-64O07rIORKQLB+5T/GKAmKcD9sC0N9yHFJXa0Hs+0Aee1G+I4bSXxTccuDFP6c/G/3h5Pk7yv7PoX9/SpzaeiQ==", + "funding": { + "url": "https://github.com/code-hike/lighter?sponsor=1" + } + }, + "node_modules/@code-hike/mdx": { + "version": "0.9.0", + "resolved": "https://registry.npmjs.org/@code-hike/mdx/-/mdx-0.9.0.tgz", + "integrity": 
"sha512-0wg68ZCjVWAkWT4gBUZJ8Mwktjen/XeWyqBQCrhA2IZSbZZnMYsEI6JJEFb/nZoNI3comB3JdxPLykZRq3qT2A==", + "dependencies": { + "@code-hike/lighter": "0.7.0", + "node-fetch": "^2.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/code-hike" + }, + "peerDependencies": { + "react": "^16.8.3 || ^17 || ^18" + } + }, + "node_modules/@code-hike/mdx/node_modules/node-fetch": { + "version": "2.6.12", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.12.tgz", + "integrity": "sha512-C/fGU2E8ToujUivIO0H+tpQ6HWo4eEmchoPIoXtxCrVghxdKq+QOHqEZW7tuP3KlV3bC8FRMO5nMCC7Zm1VP6g==", + "dependencies": { + "whatwg-url": "^5.0.0" + }, + "engines": { + "node": "4.x || >=6.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } + } + }, "node_modules/@colors/colors": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/@colors/colors/-/colors-1.5.0.tgz", @@ -2683,6 +2728,18 @@ "react-dom": "^16.8.4 || ^17.0.0" } }, + "node_modules/@docusaurus/theme-classic/node_modules/@mdx-js/react": { + "version": "1.6.22", + "resolved": "https://registry.npmjs.org/@mdx-js/react/-/react-1.6.22.tgz", + "integrity": "sha512-TDoPum4SHdfPiGSAaRBw7ECyI8VaHpK8GJugbJIJuqyh6kzw9ZLJZW3HGL3NNrJGxcAixUvqROm+YuQOo5eXtg==", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + }, + "peerDependencies": { + "react": "^16.13.1 || ^17.0.0" + } + }, "node_modules/@docusaurus/theme-common": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/@docusaurus/theme-common/-/theme-common-2.4.1.tgz", @@ -3168,15 +3225,19 @@ } }, "node_modules/@mdx-js/react": { - "version": "1.6.22", - "resolved": "https://registry.npmjs.org/@mdx-js/react/-/react-1.6.22.tgz", - "integrity": "sha512-TDoPum4SHdfPiGSAaRBw7ECyI8VaHpK8GJugbJIJuqyh6kzw9ZLJZW3HGL3NNrJGxcAixUvqROm+YuQOo5eXtg==", + "version": "2.3.0", + "resolved": 
"https://registry.npmjs.org/@mdx-js/react/-/react-2.3.0.tgz", + "integrity": "sha512-zQH//gdOmuu7nt2oJR29vFhDv88oGPmVw6BggmrHeMI+xgEkp1B2dX9/bMBSYtK0dyLX/aOmesKS09g222K1/g==", + "dependencies": { + "@types/mdx": "^2.0.0", + "@types/react": ">=16" + }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" }, "peerDependencies": { - "react": "^16.13.1 || ^17.0.0" + "react": ">=16" } }, "node_modules/@mdx-js/util": { @@ -3665,6 +3726,14 @@ "node": ">=10.13.0" } }, + "node_modules/@types/acorn": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/@types/acorn/-/acorn-4.0.6.tgz", + "integrity": "sha512-veQTnWP+1D/xbxVrPC3zHnCZRjSrKfhbMUlEA43iMZLu7EsnTtkJklIuwrCPbOi8YkvDQAiW05VQQFvvz9oieQ==", + "dependencies": { + "@types/estree": "*" + } + }, "node_modules/@types/body-parser": { "version": "1.19.2", "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.2.tgz", @@ -3730,6 +3799,14 @@ "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.1.tgz", "integrity": "sha512-LG4opVs2ANWZ1TJoKc937iMmNstM/d0ae1vNbnBvBhqCSezgVUOzcLCqbI5elV8Vy6WKwKjaqR+zO9VKirBBCA==" }, + "node_modules/@types/estree-jsx": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@types/estree-jsx/-/estree-jsx-1.0.0.tgz", + "integrity": "sha512-3qvGd0z8F2ENTGr/GG1yViqfiKmRfrXVx5sJyHGFu3z7m5g5utCQtGp/g29JnjflhtQJBv1WDQukHiT58xPcYQ==", + "dependencies": { + "@types/estree": "*" + } + }, "node_modules/@types/express": { "version": "4.17.17", "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.17.tgz", @@ -3817,6 +3894,11 @@ "@types/unist": "^2" } }, + "node_modules/@types/mdx": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@types/mdx/-/mdx-2.0.5.tgz", + "integrity": "sha512-76CqzuD6Q7LC+AtbPqrvD9AqsN0k8bsYo2bM2J8pmNldP1aIPAbzUQ7QbobyXL4eLr1wK5x8FZFe8eF/ubRuBg==" + }, "node_modules/@types/mime": { "version": "1.3.2", "resolved": 
"https://registry.npmjs.org/@types/mime/-/mime-1.3.2.tgz", @@ -4198,6 +4280,14 @@ "acorn": "^8" } }, + "node_modules/acorn-jsx": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "peerDependencies": { + "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" + } + }, "node_modules/acorn-walk": { "version": "8.2.0", "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.2.0.tgz", @@ -4502,6 +4592,14 @@ "util": "^0.12.0" } }, + "node_modules/astring": { + "version": "1.8.6", + "resolved": "https://registry.npmjs.org/astring/-/astring-1.8.6.tgz", + "integrity": "sha512-ISvCdHdlTDlH5IpxQJIex7BWBywFWgjJSVdwst+/iQCoEYnyOaQ95+X1JGshuBjGp6nxKUy1jMgE3zPqN7fQdg==", + "bin": { + "astring": "bin/astring" + } + }, "node_modules/async-foreach": { "version": "0.1.3", "resolved": "https://registry.npmjs.org/async-foreach/-/async-foreach-0.1.3.tgz", @@ -5391,6 +5489,15 @@ "url": "https://github.com/sponsors/wooorm" } }, + "node_modules/character-entities-html4": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/character-entities-html4/-/character-entities-html4-2.1.0.tgz", + "integrity": "sha512-1v7fgQRj6hnSwFpq1Eu0ynr/CDEw0rXo2B61qXrLNdHZmPKgb7fqS1a2JwF0rISo9q77jDI8VMEHoApn8qDoZA==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/character-entities-legacy": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-1.1.4.tgz", @@ -6884,135 +6991,437 @@ "node": ">=6" } }, - "node_modules/docusaurus-node-polyfills": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/docusaurus-node-polyfills/-/docusaurus-node-polyfills-1.0.0.tgz", - "integrity": "sha512-TUX/smcS0NcoiBKThM3hNlAx7Z8jJr/F5UKR+FiMlsJJbt1KYWgbj3blgUTk/ad0+hfe2vaytJZX4r0GeK6oRQ==", - "dev": true, - "dependencies": { - 
"node-polyfill-webpack-plugin": "^1.1.2", - "os-browserify": "^0.3.0", - "process": "^0.11.10" + "node_modules/docusaurus-mdx-loader-v2": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/docusaurus-mdx-loader-v2/-/docusaurus-mdx-loader-v2-0.1.2.tgz", + "integrity": "sha512-Dd/XieCKKoirnJDou4h33zRZPCmbtSqvXrZm0yMmhCpLDpeScu8CBvveFVHCqs7UB+x82IpzgZX5rHkoFlz2Bw==", + "dependencies": { + "@babel/parser": "^7.17.3", + "@babel/traverse": "^7.17.3", + "@docusaurus/logger": "2.0.0-beta.18", + "@docusaurus/utils": "2.0.0-beta.18", + "@mdx-js/mdx": "^2.1.0", + "escape-html": "^1.0.3", + "estree-util-value-to-estree": "^1.3.0", + "file-loader": "^6.2.0", + "fs-extra": "^10.0.1", + "image-size": "^1.0.1", + "lz-string": "^1.4.4", + "mdast-util-to-string": "^2.0.0", + "remark-admonitions": "^1.2.1", + "remark-emoji": "^2.1.0", + "remark-gfm": "1.0.0", + "stringify-object": "^3.3.0", + "tslib": "^2.3.1", + "unist-util-visit": "^2.0.2", + "url-loader": "^4.1.1", + "webpack": "^5.69.1" + }, + "engines": { + "node": ">=14" }, "peerDependencies": { - "webpack": ">=5" + "react": "^16.8.4 || ^17.0.0", + "react-dom": "^16.8.4 || ^17.0.0" } }, - "node_modules/docusaurus-plugin-image-zoom": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/docusaurus-plugin-image-zoom/-/docusaurus-plugin-image-zoom-0.1.4.tgz", - "integrity": "sha512-Fc16FUUVKgvF0OuMapsau0GFopSAH99t+FCRtTpF8n2Wt8toi5Et3xHKPNvuCb85PTtDuNfFUp5m9eCij0dXlw==", + "node_modules/docusaurus-mdx-loader-v2/node_modules/@docusaurus/logger": { + "version": "2.0.0-beta.18", + "resolved": "https://registry.npmjs.org/@docusaurus/logger/-/logger-2.0.0-beta.18.tgz", + "integrity": "sha512-frNe5vhH3mbPmH980Lvzaz45+n1PQl3TkslzWYXQeJOkFX17zUd3e3U7F9kR1+DocmAqHkgAoWuXVcvEoN29fg==", "dependencies": { - "medium-zoom": "^1.0.6" + "chalk": "^4.1.2", + "tslib": "^2.3.1" + }, + "engines": { + "node": ">=14" } }, - "node_modules/dom-converter": { - "version": "0.2.0", - "resolved": 
"https://registry.npmjs.org/dom-converter/-/dom-converter-0.2.0.tgz", - "integrity": "sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA==", + "node_modules/docusaurus-mdx-loader-v2/node_modules/@docusaurus/utils": { + "version": "2.0.0-beta.18", + "resolved": "https://registry.npmjs.org/@docusaurus/utils/-/utils-2.0.0-beta.18.tgz", + "integrity": "sha512-v2vBmH7xSbPwx3+GB90HgLSQdj+Rh5ELtZWy7M20w907k0ROzDmPQ/8Ke2DK3o5r4pZPGnCrsB3SaYI83AEmAA==", "dependencies": { - "utila": "~0.4" + "@docusaurus/logger": "2.0.0-beta.18", + "@svgr/webpack": "^6.2.1", + "file-loader": "^6.2.0", + "fs-extra": "^10.0.1", + "github-slugger": "^1.4.0", + "globby": "^11.1.0", + "gray-matter": "^4.0.3", + "js-yaml": "^4.1.0", + "lodash": "^4.17.21", + "micromatch": "^4.0.5", + "resolve-pathname": "^3.0.0", + "shelljs": "^0.8.5", + "tslib": "^2.3.1", + "url-loader": "^4.1.1", + "webpack": "^5.70.0" + }, + "engines": { + "node": ">=14" } }, - "node_modules/dom-helpers": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/dom-helpers/-/dom-helpers-5.2.1.tgz", - "integrity": "sha512-nRCa7CK3VTrM2NmGkIy4cbK7IZlgBE/PYMn55rrXefr5xXDP0LdtfPnblFDoVdcAfslJ7or6iqAUnx0CCGIWQA==", - "dependencies": { - "@babel/runtime": "^7.8.7", - "csstype": "^3.0.2" + "node_modules/docusaurus-mdx-loader-v2/node_modules/@mdx-js/mdx": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@mdx-js/mdx/-/mdx-2.3.0.tgz", + "integrity": "sha512-jLuwRlz8DQfQNiUCJR50Y09CGPq3fLtmtUQfVrj79E0JWu3dvsVcxVIcfhR5h0iXu+/z++zDrYeiJqifRynJkA==", + "dependencies": { + "@types/estree-jsx": "^1.0.0", + "@types/mdx": "^2.0.0", + "estree-util-build-jsx": "^2.0.0", + "estree-util-is-identifier-name": "^2.0.0", + "estree-util-to-js": "^1.1.0", + "estree-walker": "^3.0.0", + "hast-util-to-estree": "^2.0.0", + "markdown-extensions": "^1.0.0", + "periscopic": "^3.0.0", + "remark-mdx": "^2.0.0", + "remark-parse": "^10.0.0", + "remark-rehype": "^10.0.0", + "unified": "^10.0.0", + 
"unist-util-position-from-estree": "^1.0.0", + "unist-util-stringify-position": "^3.0.0", + "unist-util-visit": "^4.0.0", + "vfile": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/dom-serializer": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz", - "integrity": "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==", + "node_modules/docusaurus-mdx-loader-v2/node_modules/@mdx-js/mdx/node_modules/unist-util-visit": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-4.1.2.tgz", + "integrity": "sha512-MSd8OUGISqHdVvfY9TPhyK2VdUrPgxkUtWSuMHF6XAAFuL4LokseigBnZtPnJMu+FbynTkFNnFlyjxpVKujMRg==", "dependencies": { - "domelementtype": "^2.3.0", - "domhandler": "^5.0.2", - "entities": "^4.2.0" + "@types/unist": "^2.0.0", + "unist-util-is": "^5.0.0", + "unist-util-visit-parents": "^5.1.1" }, "funding": { - "url": "https://github.com/cheeriojs/dom-serializer?sponsor=1" + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/domain-browser": { - "version": "4.22.0", - "resolved": "https://registry.npmjs.org/domain-browser/-/domain-browser-4.22.0.tgz", - "integrity": "sha512-IGBwjF7tNk3cwypFNH/7bfzBcgSCbaMOD3GsaY1AU/JRrnHnYgEM0+9kQt52iZxjNsjBtJYtao146V+f8jFZNw==", - "dev": true, + "node_modules/docusaurus-mdx-loader-v2/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dependencies": { + "color-convert": "^2.0.1" + }, "engines": { - "node": ">=10" + "node": ">=8" }, "funding": { - "url": "https://bevry.me/fund" + "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/domelementtype": { - "version": 
"2.3.0", - "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz", - "integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/fb55" - } - ] + "node_modules/docusaurus-mdx-loader-v2/node_modules/bail": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/bail/-/bail-2.0.2.tgz", + "integrity": "sha512-0xO6mYd7JB2YesxDKplafRpsiOzPt9V02ddPCLbY1xYGPOX24NTyN50qnUxgCPcSoYMhKpAuBTjQoRZCAkUDRw==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } }, - "node_modules/domhandler": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz", - "integrity": "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==", + "node_modules/docusaurus-mdx-loader-v2/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dependencies": { - "domelementtype": "^2.3.0" + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" }, "engines": { - "node": ">= 4" + "node": ">=10" }, "funding": { - "url": "https://github.com/fb55/domhandler?sponsor=1" + "url": "https://github.com/chalk/chalk?sponsor=1" } }, - "node_modules/domutils": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/domutils/-/domutils-3.1.0.tgz", - "integrity": "sha512-H78uMmQtI2AhgDJjWeQmHwJJ2bLPD3GMmO7Zja/ZZh84wkm+4ut+IUnUdRa8uCGX88DiVx1j6FRe1XfxEgjEZA==", + "node_modules/docusaurus-mdx-loader-v2/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dependencies": { - 
"dom-serializer": "^2.0.0", - "domelementtype": "^2.3.0", - "domhandler": "^5.0.3" + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/docusaurus-mdx-loader-v2/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + }, + "node_modules/docusaurus-mdx-loader-v2/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "engines": { + "node": ">=8" + } + }, + "node_modules/docusaurus-mdx-loader-v2/node_modules/is-plain-obj": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-4.1.0.tgz", + "integrity": "sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==", + "engines": { + "node": ">=12" }, "funding": { - "url": "https://github.com/fb55/domutils?sponsor=1" + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/dot-case": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/dot-case/-/dot-case-3.0.4.tgz", - "integrity": "sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w==", + "node_modules/docusaurus-mdx-loader-v2/node_modules/remark-mdx": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/remark-mdx/-/remark-mdx-2.3.0.tgz", + "integrity": "sha512-g53hMkpM0I98MU266IzDFMrTD980gNF3BJnkyFcmN+dD873mQeD5rdMO3Y2X+x8umQfbSE0PcoEDl7ledSA+2g==", "dependencies": { - "no-case": "^3.0.4", - "tslib": "^2.0.3" + "mdast-util-mdx": "^2.0.0", + "micromark-extension-mdxjs": "^1.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/dot-prop": { - 
"version": "5.3.0", - "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-5.3.0.tgz", - "integrity": "sha512-QM8q3zDe58hqUqjraQOmzZ1LIH9SWQJTlEKCH4kJ2oQvLZk7RbQXvtDM2XEq3fwkV9CCvvH4LA0AV+ogFsBM2Q==", + "node_modules/docusaurus-mdx-loader-v2/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dependencies": { - "is-obj": "^2.0.0" + "has-flag": "^4.0.0" }, "engines": { "node": ">=8" } }, - "node_modules/dot-prop/node_modules/is-obj": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz", - "integrity": "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==", - "engines": { - "node": ">=8" - } + "node_modules/docusaurus-mdx-loader-v2/node_modules/trough": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/trough/-/trough-2.1.0.tgz", + "integrity": "sha512-AqTiAOLcj85xS7vQ8QkAV41hPDIJ71XJB4RCUrzo/1GM2CQwhkJGaf9Hgr7BOugMRpgGUrqRg/DrBDl4H40+8g==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/docusaurus-mdx-loader-v2/node_modules/unified": { + "version": "10.1.2", + "resolved": "https://registry.npmjs.org/unified/-/unified-10.1.2.tgz", + "integrity": "sha512-pUSWAi/RAnVy1Pif2kAoeWNBa3JVrx0MId2LASj8G+7AiHWoKZNTomq6LG326T68U7/e263X6fTdcXIy7XnF7Q==", + "dependencies": { + "@types/unist": "^2.0.0", + "bail": "^2.0.0", + "extend": "^3.0.0", + "is-buffer": "^2.0.0", + "is-plain-obj": "^4.0.0", + "trough": "^2.0.0", + "vfile": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/docusaurus-mdx-loader-v2/node_modules/unist-util-is": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-5.2.1.tgz", + 
"integrity": "sha512-u9njyyfEh43npf1M+yGKDGVPbY/JWEemg5nH05ncKPfi+kBbKBJoTdsogMu33uhytuLlv9y0O7GH7fEdwLdLQw==", + "dependencies": { + "@types/unist": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/docusaurus-mdx-loader-v2/node_modules/unist-util-visit-parents": { + "version": "5.1.3", + "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-5.1.3.tgz", + "integrity": "sha512-x6+y8g7wWMyQhL1iZfhIPhDAs7Xwbn9nRosDXl7qoPTSCy0yNxnKc+hWokFifWQIDGi154rdUqKvbCa4+1kLhg==", + "dependencies": { + "@types/unist": "^2.0.0", + "unist-util-is": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/docusaurus-mdx-loader-v2/node_modules/vfile": { + "version": "5.3.7", + "resolved": "https://registry.npmjs.org/vfile/-/vfile-5.3.7.tgz", + "integrity": "sha512-r7qlzkgErKjobAmyNIkkSpizsFPYiUPuJb5pNW1RB4JcYVZhs4lIbVqk8XPk033CV/1z8ss5pkax8SuhGpcG8g==", + "dependencies": { + "@types/unist": "^2.0.0", + "is-buffer": "^2.0.0", + "unist-util-stringify-position": "^3.0.0", + "vfile-message": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/docusaurus-mdx-loader-v2/node_modules/vfile-message": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-3.1.4.tgz", + "integrity": "sha512-fa0Z6P8HUrQN4BZaX05SIVXic+7kE3b05PWAtPuYP9QLHsLKYR7/AlLW3NtOrpXRLeawpDLMsVkmk5DG0NXgWw==", + "dependencies": { + "@types/unist": "^2.0.0", + "unist-util-stringify-position": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/docusaurus-node-polyfills": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/docusaurus-node-polyfills/-/docusaurus-node-polyfills-1.0.0.tgz", + "integrity": 
"sha512-TUX/smcS0NcoiBKThM3hNlAx7Z8jJr/F5UKR+FiMlsJJbt1KYWgbj3blgUTk/ad0+hfe2vaytJZX4r0GeK6oRQ==", + "dev": true, + "dependencies": { + "node-polyfill-webpack-plugin": "^1.1.2", + "os-browserify": "^0.3.0", + "process": "^0.11.10" + }, + "peerDependencies": { + "webpack": ">=5" + } + }, + "node_modules/docusaurus-plugin-image-zoom": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/docusaurus-plugin-image-zoom/-/docusaurus-plugin-image-zoom-0.1.4.tgz", + "integrity": "sha512-Fc16FUUVKgvF0OuMapsau0GFopSAH99t+FCRtTpF8n2Wt8toi5Et3xHKPNvuCb85PTtDuNfFUp5m9eCij0dXlw==", + "dependencies": { + "medium-zoom": "^1.0.6" + } + }, + "node_modules/docusaurus-theme-mdx-v2": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/docusaurus-theme-mdx-v2/-/docusaurus-theme-mdx-v2-0.1.2.tgz", + "integrity": "sha512-n5L4nx0LV5coTkZYS+owXmM0ACXWCbd4ou7aDrWIMm3YH7XPusSNelJpYsUKJxHFER/+czitbmieboFe4I7lMQ==", + "dependencies": { + "@mdx-js/react": "^2.1.0", + "docusaurus-mdx-loader-v2": "0.1.2" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/dom-converter": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/dom-converter/-/dom-converter-0.2.0.tgz", + "integrity": "sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA==", + "dependencies": { + "utila": "~0.4" + } + }, + "node_modules/dom-helpers": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/dom-helpers/-/dom-helpers-5.2.1.tgz", + "integrity": "sha512-nRCa7CK3VTrM2NmGkIy4cbK7IZlgBE/PYMn55rrXefr5xXDP0LdtfPnblFDoVdcAfslJ7or6iqAUnx0CCGIWQA==", + "dependencies": { + "@babel/runtime": "^7.8.7", + "csstype": "^3.0.2" + } + }, + "node_modules/dom-serializer": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz", + "integrity": "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==", + "dependencies": { + "domelementtype": "^2.3.0", + 
"domhandler": "^5.0.2", + "entities": "^4.2.0" + }, + "funding": { + "url": "https://github.com/cheeriojs/dom-serializer?sponsor=1" + } + }, + "node_modules/domain-browser": { + "version": "4.22.0", + "resolved": "https://registry.npmjs.org/domain-browser/-/domain-browser-4.22.0.tgz", + "integrity": "sha512-IGBwjF7tNk3cwypFNH/7bfzBcgSCbaMOD3GsaY1AU/JRrnHnYgEM0+9kQt52iZxjNsjBtJYtao146V+f8jFZNw==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://bevry.me/fund" + } + }, + "node_modules/domelementtype": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz", + "integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fb55" + } + ] + }, + "node_modules/domhandler": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz", + "integrity": "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==", + "dependencies": { + "domelementtype": "^2.3.0" + }, + "engines": { + "node": ">= 4" + }, + "funding": { + "url": "https://github.com/fb55/domhandler?sponsor=1" + } + }, + "node_modules/domutils": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/domutils/-/domutils-3.1.0.tgz", + "integrity": "sha512-H78uMmQtI2AhgDJjWeQmHwJJ2bLPD3GMmO7Zja/ZZh84wkm+4ut+IUnUdRa8uCGX88DiVx1j6FRe1XfxEgjEZA==", + "dependencies": { + "dom-serializer": "^2.0.0", + "domelementtype": "^2.3.0", + "domhandler": "^5.0.3" + }, + "funding": { + "url": "https://github.com/fb55/domutils?sponsor=1" + } + }, + "node_modules/dot-case": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/dot-case/-/dot-case-3.0.4.tgz", + "integrity": "sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w==", + "dependencies": { + "no-case": "^3.0.4", + "tslib": 
"^2.0.3" + } + }, + "node_modules/dot-prop": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-5.3.0.tgz", + "integrity": "sha512-QM8q3zDe58hqUqjraQOmzZ1LIH9SWQJTlEKCH4kJ2oQvLZk7RbQXvtDM2XEq3fwkV9CCvvH4LA0AV+ogFsBM2Q==", + "dependencies": { + "is-obj": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/dot-prop/node_modules/is-obj": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz", + "integrity": "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==", + "engines": { + "node": ">=8" + } }, "node_modules/duplexer": { "version": "0.1.2", @@ -7247,47 +7656,147 @@ "node": ">=4.0" } }, - "node_modules/esutils": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", - "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", - "engines": { - "node": ">=0.10.0" + "node_modules/estree-util-attach-comments": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/estree-util-attach-comments/-/estree-util-attach-comments-2.1.1.tgz", + "integrity": "sha512-+5Ba/xGGS6mnwFbXIuQiDPTbuTxuMCooq3arVv7gPZtYpjp+VXH/NkHAP35OOefPhNG/UGqU3vt/LTABwcHX0w==", + "dependencies": { + "@types/estree": "^1.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/eta": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/eta/-/eta-2.2.0.tgz", - "integrity": "sha512-UVQ72Rqjy/ZKQalzV5dCCJP80GrmPrMxh6NlNf+erV6ObL0ZFkhCstWRawS85z3smdr3d2wXPsZEY7rDPfGd2g==", - "engines": { - "node": ">=6.0.0" + "node_modules/estree-util-build-jsx": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/estree-util-build-jsx/-/estree-util-build-jsx-2.2.2.tgz", + "integrity": "sha512-m56vOXcOBuaF+Igpb9OPAy7f9w9OIkb5yhjsZuaPm7HoGi4oTOQi0h2+yZ+AtKklYFZ+rPC4n0wYCJCEU1ONqg==", + "dependencies": 
{ + "@types/estree-jsx": "^1.0.0", + "estree-util-is-identifier-name": "^2.0.0", + "estree-walker": "^3.0.0" }, "funding": { - "url": "https://github.com/eta-dev/eta?sponsor=1" + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/etag": { - "version": "1.8.1", - "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", - "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==", - "engines": { - "node": ">= 0.6" + "node_modules/estree-util-is-identifier-name": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/estree-util-is-identifier-name/-/estree-util-is-identifier-name-2.1.0.tgz", + "integrity": "sha512-bEN9VHRyXAUOjkKVQVvArFym08BTWB0aJPppZZr0UNyAqWsLaVfAqP7hbaTJjzHifmB5ebnR8Wm7r7yGN/HonQ==", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/eval": { - "version": "0.1.8", - "resolved": "https://registry.npmjs.org/eval/-/eval-0.1.8.tgz", - "integrity": "sha512-EzV94NYKoO09GLXGjXj9JIlXijVck4ONSr5wiCWDvhsvj5jxSrzTmRU/9C1DyB6uToszLs8aifA6NQ7lEQdvFw==", + "node_modules/estree-util-to-js": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/estree-util-to-js/-/estree-util-to-js-1.2.0.tgz", + "integrity": "sha512-IzU74r1PK5IMMGZXUVZbmiu4A1uhiPgW5hm1GjcOfr4ZzHaMPpLNJjR7HjXiIOzi25nZDrgFTobHTkV5Q6ITjA==", "dependencies": { - "@types/node": "*", - "require-like": ">= 0.1.1" + "@types/estree-jsx": "^1.0.0", + "astring": "^1.8.0", + "source-map": "^0.7.0" }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/estree-util-to-js/node_modules/source-map": { + "version": "0.7.4", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.4.tgz", + "integrity": "sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA==", "engines": { - "node": ">= 0.8" + "node": ">= 8" } }, - 
"node_modules/eventemitter3": { - "version": "4.0.7", + "node_modules/estree-util-value-to-estree": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/estree-util-value-to-estree/-/estree-util-value-to-estree-1.3.0.tgz", + "integrity": "sha512-Y+ughcF9jSUJvncXwqRageavjrNPAI+1M/L3BI3PyLp1nmgYTGUXU6t5z1Y7OWuThoDdhPME07bQU+d5LxdJqw==", + "dependencies": { + "is-plain-obj": "^3.0.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/estree-util-value-to-estree/node_modules/is-plain-obj": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-3.0.0.tgz", + "integrity": "sha512-gwsOE28k+23GP1B6vFl1oVh/WOzmawBrKwo5Ev6wMKzPkaXaCDIQKzLnvsA42DRlbVTWorkgTKIviAKCWkfUwA==", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/estree-util-visit": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/estree-util-visit/-/estree-util-visit-1.2.1.tgz", + "integrity": "sha512-xbgqcrkIVbIG+lI/gzbvd9SGTJL4zqJKBFttUl5pP27KhAjtMKbX/mQXJ7qgyXpMgVy/zvpm0xoQQaGL8OloOw==", + "dependencies": { + "@types/estree-jsx": "^1.0.0", + "@types/unist": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/estree-walker": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz", + "integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==", + "dependencies": { + "@types/estree": "^1.0.0" + } + }, + "node_modules/esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/eta": { + "version": "2.2.0", + "resolved": 
"https://registry.npmjs.org/eta/-/eta-2.2.0.tgz", + "integrity": "sha512-UVQ72Rqjy/ZKQalzV5dCCJP80GrmPrMxh6NlNf+erV6ObL0ZFkhCstWRawS85z3smdr3d2wXPsZEY7rDPfGd2g==", + "engines": { + "node": ">=6.0.0" + }, + "funding": { + "url": "https://github.com/eta-dev/eta?sponsor=1" + } + }, + "node_modules/etag": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", + "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/eval": { + "version": "0.1.8", + "resolved": "https://registry.npmjs.org/eval/-/eval-0.1.8.tgz", + "integrity": "sha512-EzV94NYKoO09GLXGjXj9JIlXijVck4ONSr5wiCWDvhsvj5jxSrzTmRU/9C1DyB6uToszLs8aifA6NQ7lEQdvFw==", + "dependencies": { + "@types/node": "*", + "require-like": ">= 0.1.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/eventemitter3": { + "version": "4.0.7", "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.7.tgz", "integrity": "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==" }, @@ -8606,6 +9115,88 @@ "resolved": "https://registry.npmjs.org/parse5/-/parse5-6.0.1.tgz", "integrity": "sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw==" }, + "node_modules/hast-util-to-estree": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/hast-util-to-estree/-/hast-util-to-estree-2.3.3.tgz", + "integrity": "sha512-ihhPIUPxN0v0w6M5+IiAZZrn0LH2uZomeWwhn7uP7avZC6TE7lIiEh2yBMPr5+zi1aUCXq6VoYRgs2Bw9xmycQ==", + "dependencies": { + "@types/estree": "^1.0.0", + "@types/estree-jsx": "^1.0.0", + "@types/hast": "^2.0.0", + "@types/unist": "^2.0.0", + "comma-separated-tokens": "^2.0.0", + "estree-util-attach-comments": "^2.0.0", + "estree-util-is-identifier-name": "^2.0.0", + "hast-util-whitespace": "^2.0.0", + "mdast-util-mdx-expression": "^1.0.0", + "mdast-util-mdxjs-esm": 
"^1.0.0", + "property-information": "^6.0.0", + "space-separated-tokens": "^2.0.0", + "style-to-object": "^0.4.1", + "unist-util-position": "^4.0.0", + "zwitch": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-to-estree/node_modules/comma-separated-tokens": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-2.0.3.tgz", + "integrity": "sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/hast-util-to-estree/node_modules/property-information": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/property-information/-/property-information-6.2.0.tgz", + "integrity": "sha512-kma4U7AFCTwpqq5twzC1YVIDXSqg6qQK6JN0smOw8fgRy1OkMi0CYSzFmsy6dnqSenamAtj0CyXMUJ1Mf6oROg==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/hast-util-to-estree/node_modules/space-separated-tokens": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-2.0.2.tgz", + "integrity": "sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/hast-util-to-estree/node_modules/style-to-object": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/style-to-object/-/style-to-object-0.4.1.tgz", + "integrity": "sha512-HFpbb5gr2ypci7Qw+IOhnP2zOU7e77b+rzM+wTzXzfi1PrtBCX0E7Pk4wL4iTLnhzZ+JgEGAhX81ebTg/aYjQw==", + "dependencies": { + "inline-style-parser": "0.1.1" + } + }, + "node_modules/hast-util-to-estree/node_modules/unist-util-position": { + "version": "4.0.4", + "resolved": 
"https://registry.npmjs.org/unist-util-position/-/unist-util-position-4.0.4.tgz", + "integrity": "sha512-kUBE91efOWfIVBo8xzh/uZQ7p9ffYRtUbMRZBNFYwf0RK8koUMx6dGUfwylLOKmaT2cs4wSW96QoYUSXAyEtpg==", + "dependencies": { + "@types/unist": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-to-estree/node_modules/zwitch": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-2.0.4.tgz", + "integrity": "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/hast-util-to-parse5": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/hast-util-to-parse5/-/hast-util-to-parse5-6.0.0.tgz", @@ -8622,6 +9213,15 @@ "url": "https://opencollective.com/unified" } }, + "node_modules/hast-util-whitespace": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/hast-util-whitespace/-/hast-util-whitespace-2.0.1.tgz", + "integrity": "sha512-nAxA0v8+vXSBDt3AnRUNjyRIQ0rD+ntpbAp4LnPkumc5M9yUbSMa4XDU9Q6etY4f1Wp4bNgvc1yjiZtsTTrSng==", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/hastscript": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/hastscript/-/hastscript-6.0.0.tgz", @@ -9472,6 +10072,14 @@ "node": ">=0.10.0" } }, + "node_modules/is-reference": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/is-reference/-/is-reference-3.0.1.tgz", + "integrity": "sha512-baJJdQLiYaJdvFbJqXrcGv3WU3QCzBlUcI5QhbesIm6/xPsvmO+2CDoi/GMOFBQEQm+PXkwOPrp9KK5ozZsp2w==", + "dependencies": { + "@types/estree": "*" + } + }, "node_modules/is-regexp": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/is-regexp/-/is-regexp-1.0.0.tgz", @@ -9952,6 +10560,15 @@ "resolved": "https://registry.npmjs.org/lodash.uniq/-/lodash.uniq-4.5.0.tgz", 
"integrity": "sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ==" }, + "node_modules/longest-streak": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/longest-streak/-/longest-streak-2.0.4.tgz", + "integrity": "sha512-vM6rUVCVUJJt33bnmHiZEvr7wPT78ztX7rojL+LW51bHtLh6HTjx84LA5W4+oa6aKEJA7jJu5LR6vQRBpA5DVg==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/loose-envify": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", @@ -9987,6 +10604,14 @@ "yallist": "^3.0.2" } }, + "node_modules/lz-string": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/lz-string/-/lz-string-1.5.0.tgz", + "integrity": "sha512-h5bgJWpxJNswbU7qCrV0tIKQCaS3blPDrqKWx+QxzuzL1zGUzij9XCWLrSLsJPu5t+eWA/ycetzYAO5IOMcWAQ==", + "bin": { + "lz-string": "bin/bin.js" + } + }, "node_modules/make-dir": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", @@ -10058,6 +10683,26 @@ "url": "https://github.com/sponsors/wooorm" } }, + "node_modules/markdown-extensions": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/markdown-extensions/-/markdown-extensions-1.1.1.tgz", + "integrity": "sha512-WWC0ZuMzCyDHYCasEGs4IPvLyTGftYwh6wIEOULOF0HXcqZlhwRzrK0w2VUlxWA98xnvb/jszw4ZSkJ6ADpM6Q==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/markdown-table": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/markdown-table/-/markdown-table-2.0.0.tgz", + "integrity": "sha512-Ezda85ToJUBhM6WGaG6veasyym+Tbs3cMAw/ZhOPqXiYsr0jgocBV3j3nx+4lk47plLlIqjwuTm/ywVI+zjJ/A==", + "dependencies": { + "repeat-string": "^1.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/md5.js": { "version": "1.3.5", "resolved": "https://registry.npmjs.org/md5.js/-/md5.js-1.3.5.tgz", @@ -10093,6 +10738,20 @@ "url": 
"https://opencollective.com/unified" } }, + "node_modules/mdast-util-find-and-replace": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/mdast-util-find-and-replace/-/mdast-util-find-and-replace-1.1.1.tgz", + "integrity": "sha512-9cKl33Y21lyckGzpSmEQnIDjEfeeWelN5s1kUW1LwdB0Fkuq2u+4GdqcGEygYxJE8GVqCl0741bYXHgamfWAZA==", + "dependencies": { + "escape-string-regexp": "^4.0.0", + "unist-util-is": "^4.0.0", + "unist-util-visit-parents": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/mdast-util-from-markdown": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-1.3.1.tgz", @@ -10128,141 +10787,1021 @@ "url": "https://opencollective.com/unified" } }, - "node_modules/mdast-util-to-hast": { - "version": "10.0.1", - "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-10.0.1.tgz", - "integrity": "sha512-BW3LM9SEMnjf4HXXVApZMt8gLQWVNXc3jryK0nJu/rOXPOnlkUjmdkDlmxMirpbU9ILncGFIwLH/ubnWBbcdgA==", + "node_modules/mdast-util-gfm": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/mdast-util-gfm/-/mdast-util-gfm-0.1.2.tgz", + "integrity": "sha512-NNkhDx/qYcuOWB7xHUGWZYVXvjPFFd6afg6/e2g+SV4r9q5XUcCbV4Wfa3DLYIiD+xAEZc6K4MGaE/m0KDcPwQ==", "dependencies": { - "@types/mdast": "^3.0.0", - "@types/unist": "^2.0.0", - "mdast-util-definitions": "^4.0.0", - "mdurl": "^1.0.0", - "unist-builder": "^2.0.0", - "unist-util-generated": "^1.0.0", - "unist-util-position": "^3.0.0", - "unist-util-visit": "^2.0.0" + "mdast-util-gfm-autolink-literal": "^0.1.0", + "mdast-util-gfm-strikethrough": "^0.2.0", + "mdast-util-gfm-table": "^0.1.0", + "mdast-util-gfm-task-list-item": "^0.1.0", + "mdast-util-to-markdown": "^0.6.1" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, - "node_modules/mdast-util-to-string": { - "version": "2.0.0", - "resolved": 
"https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-2.0.0.tgz", - "integrity": "sha512-AW4DRS3QbBayY/jJmD8437V1Gombjf8RSOUCMFBuo5iHi58AGEgVCKQ+ezHkZZDpAQS75hcBMpLqjpJTjtUL7w==", + "node_modules/mdast-util-gfm-autolink-literal": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-autolink-literal/-/mdast-util-gfm-autolink-literal-0.1.3.tgz", + "integrity": "sha512-GjmLjWrXg1wqMIO9+ZsRik/s7PLwTaeCHVB7vRxUwLntZc8mzmTsLVr6HW1yLokcnhfURsn5zmSVdi3/xWWu1A==", + "dependencies": { + "ccount": "^1.0.0", + "mdast-util-find-and-replace": "^1.1.0", + "micromark": "^2.11.3" + }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, - "node_modules/mdn-data": { - "version": "2.0.14", - "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.14.tgz", - "integrity": "sha512-dn6wd0uw5GsdswPFfsgMp5NSB0/aDe6fK94YJV/AJDYXL6HVLWBsxeq7js7Ad+mU2K9LAlwpk6kN2D5mwCPVow==" - }, - "node_modules/mdurl": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-1.0.1.tgz", - "integrity": "sha512-/sKlQJCBYVY9Ers9hqzKou4H6V5UWc/M59TH2dvkt+84itfnq7uFOMLpOiOS4ujvHP4etln18fmIxA5R5fll0g==" - }, - "node_modules/media-typer": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", - "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==", - "engines": { - "node": ">= 0.6" + "node_modules/mdast-util-gfm-autolink-literal/node_modules/micromark": { + "version": "2.11.4", + "resolved": "https://registry.npmjs.org/micromark/-/micromark-2.11.4.tgz", + "integrity": "sha512-+WoovN/ppKolQOFIAajxi7Lu9kInbPxFuTBVEavFcL8eAfVstoc5MocPmqBeAdBOJV00uaVjegzH4+MA0DN/uA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "debug": "^4.0.0", + 
"parse-entities": "^2.0.0" } }, - "node_modules/medium-zoom": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/medium-zoom/-/medium-zoom-1.0.8.tgz", - "integrity": "sha512-CjFVuFq/IfrdqesAXfg+hzlDKu6A2n80ZIq0Kl9kWjoHh9j1N9Uvk5X0/MmN0hOfm5F9YBswlClhcwnmtwz7gA==" - }, - "node_modules/memfs": { - "version": "3.5.3", - "resolved": "https://registry.npmjs.org/memfs/-/memfs-3.5.3.tgz", - "integrity": "sha512-UERzLsxzllchadvbPs5aolHh65ISpKpM+ccLbOJ8/vvpBKmAWf+la7dXFy7Mr0ySHbdHrFv5kGFCUHHe6GFEmw==", + "node_modules/mdast-util-gfm-strikethrough": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-strikethrough/-/mdast-util-gfm-strikethrough-0.2.3.tgz", + "integrity": "sha512-5OQLXpt6qdbttcDG/UxYY7Yjj3e8P7X16LzvpX8pIQPYJ/C2Z1qFGMmcw+1PZMUM3Z8wt8NRfYTvCni93mgsgA==", "dependencies": { - "fs-monkey": "^1.0.4" + "mdast-util-to-markdown": "^0.6.0" }, - "engines": { - "node": ">= 4.0.0" + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/memoize-one": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/memoize-one/-/memoize-one-5.2.1.tgz", - "integrity": "sha512-zYiwtZUcYyXKo/np96AGZAckk+FWWsUdJ3cHGGmld7+AhvcWmQyGCYUh1hc4Q/pkOhb65dQR/pqCyK0cOaHz4Q==" - }, - "node_modules/meow": { - "version": "9.0.0", - "resolved": "https://registry.npmjs.org/meow/-/meow-9.0.0.tgz", - "integrity": "sha512-+obSblOQmRhcyBt62furQqRAQpNyWXo8BuQ5bN7dG8wmwQ+vwHKp/rCFD4CrTP8CsDQD1sjoZ94K417XEUk8IQ==", - "dev": true, + "node_modules/mdast-util-gfm-table": { + "version": "0.1.6", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-table/-/mdast-util-gfm-table-0.1.6.tgz", + "integrity": "sha512-j4yDxQ66AJSBwGkbpFEp9uG/LS1tZV3P33fN1gkyRB2LoRL+RR3f76m0HPHaby6F4Z5xr9Fv1URmATlRRUIpRQ==", "dependencies": { - "@types/minimist": "^1.2.0", - "camelcase-keys": "^6.2.2", - "decamelize": "^1.2.0", - "decamelize-keys": "^1.1.0", - "hard-rejection": "^2.1.0", - "minimist-options": "4.1.0", - 
"normalize-package-data": "^3.0.0", - "read-pkg-up": "^7.0.1", - "redent": "^3.0.0", - "trim-newlines": "^3.0.0", - "type-fest": "^0.18.0", - "yargs-parser": "^20.2.3" - }, - "engines": { - "node": ">=10" + "markdown-table": "^2.0.0", + "mdast-util-to-markdown": "~0.6.0" }, "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-gfm-task-list-item": { + "version": "0.1.6", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-task-list-item/-/mdast-util-gfm-task-list-item-0.1.6.tgz", + "integrity": "sha512-/d51FFIfPsSmCIRNp7E6pozM9z1GYPIkSy1urQ8s/o4TC22BZ7DqfHFWiqBD23bc7J3vV1Fc9O4QIHBlfuit8A==", + "dependencies": { + "mdast-util-to-markdown": "~0.6.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/mdast-util-mdx/-/mdast-util-mdx-2.0.1.tgz", + "integrity": "sha512-38w5y+r8nyKlGvNjSEqWrhG0w5PmnRA+wnBvm+ulYCct7nsGYhFVb0lljS9bQav4psDAS1eGkP2LMVcZBi/aqw==", + "dependencies": { + "mdast-util-from-markdown": "^1.0.0", + "mdast-util-mdx-expression": "^1.0.0", + "mdast-util-mdx-jsx": "^2.0.0", + "mdast-util-mdxjs-esm": "^1.0.0", + "mdast-util-to-markdown": "^1.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx-expression": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/mdast-util-mdx-expression/-/mdast-util-mdx-expression-1.3.2.tgz", + "integrity": "sha512-xIPmR5ReJDu/DHH1OoIT1HkuybIfRGYRywC+gJtI7qHjCJp/M9jrmBEJW22O8lskDWm562BX2W8TiAwRTb0rKA==", + "dependencies": { + "@types/estree-jsx": "^1.0.0", + "@types/hast": "^2.0.0", + "@types/mdast": "^3.0.0", + "mdast-util-from-markdown": "^1.0.0", + "mdast-util-to-markdown": "^1.0.0" + }, + "funding": { + "type": "opencollective", + "url": 
"https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx-expression/node_modules/longest-streak": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/longest-streak/-/longest-streak-3.1.0.tgz", + "integrity": "sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/mdast-util-mdx-expression/node_modules/mdast-util-to-markdown": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-1.5.0.tgz", + "integrity": "sha512-bbv7TPv/WC49thZPg3jXuqzuvI45IL2EVAr/KxF0BSdHsU0ceFHOmwQn6evxAh1GaoK/6GQ1wp4R4oW2+LFL/A==", + "dependencies": { + "@types/mdast": "^3.0.0", + "@types/unist": "^2.0.0", + "longest-streak": "^3.0.0", + "mdast-util-phrasing": "^3.0.0", + "mdast-util-to-string": "^3.0.0", + "micromark-util-decode-string": "^1.0.0", + "unist-util-visit": "^4.0.0", + "zwitch": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx-expression/node_modules/mdast-util-to-string": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-3.2.0.tgz", + "integrity": "sha512-V4Zn/ncyN1QNSqSBxTrMOLpjr+IKdHl2v3KVLoWmDPscP4r9GcCi71gjgvUV1SFSKh92AjAG4peFuBl2/YgCJg==", + "dependencies": { + "@types/mdast": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx-expression/node_modules/unist-util-is": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-5.2.1.tgz", + "integrity": "sha512-u9njyyfEh43npf1M+yGKDGVPbY/JWEemg5nH05ncKPfi+kBbKBJoTdsogMu33uhytuLlv9y0O7GH7fEdwLdLQw==", + "dependencies": { + "@types/unist": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": 
"https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx-expression/node_modules/unist-util-visit": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-4.1.2.tgz", + "integrity": "sha512-MSd8OUGISqHdVvfY9TPhyK2VdUrPgxkUtWSuMHF6XAAFuL4LokseigBnZtPnJMu+FbynTkFNnFlyjxpVKujMRg==", + "dependencies": { + "@types/unist": "^2.0.0", + "unist-util-is": "^5.0.0", + "unist-util-visit-parents": "^5.1.1" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx-expression/node_modules/unist-util-visit-parents": { + "version": "5.1.3", + "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-5.1.3.tgz", + "integrity": "sha512-x6+y8g7wWMyQhL1iZfhIPhDAs7Xwbn9nRosDXl7qoPTSCy0yNxnKc+hWokFifWQIDGi154rdUqKvbCa4+1kLhg==", + "dependencies": { + "@types/unist": "^2.0.0", + "unist-util-is": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx-expression/node_modules/zwitch": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-2.0.4.tgz", + "integrity": "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/mdast-util-mdx-jsx": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/mdast-util-mdx-jsx/-/mdast-util-mdx-jsx-2.1.4.tgz", + "integrity": "sha512-DtMn9CmVhVzZx3f+optVDF8yFgQVt7FghCRNdlIaS3X5Bnym3hZwPbg/XW86vdpKjlc1PVj26SpnLGeJBXD3JA==", + "dependencies": { + "@types/estree-jsx": "^1.0.0", + "@types/hast": "^2.0.0", + "@types/mdast": "^3.0.0", + "@types/unist": "^2.0.0", + "ccount": "^2.0.0", + "mdast-util-from-markdown": "^1.1.0", + "mdast-util-to-markdown": "^1.3.0", + "parse-entities": "^4.0.0", + "stringify-entities": 
"^4.0.0", + "unist-util-remove-position": "^4.0.0", + "unist-util-stringify-position": "^3.0.0", + "vfile-message": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx-jsx/node_modules/ccount": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/ccount/-/ccount-2.0.1.tgz", + "integrity": "sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/mdast-util-mdx-jsx/node_modules/character-entities-legacy": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-3.0.0.tgz", + "integrity": "sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/mdast-util-mdx-jsx/node_modules/character-reference-invalid": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/character-reference-invalid/-/character-reference-invalid-2.0.1.tgz", + "integrity": "sha512-iBZ4F4wRbyORVsu0jPV7gXkOsGYjGHPmAyv+HiHG8gi5PtC9KI2j1+v8/tlibRvjoWX027ypmG/n0HtO5t7unw==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/mdast-util-mdx-jsx/node_modules/is-alphabetical": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-alphabetical/-/is-alphabetical-2.0.1.tgz", + "integrity": "sha512-FWyyY60MeTNyeSRpkM2Iry0G9hpr7/9kD40mD/cGQEuilcZYS4okz8SN2Q6rLCJ8gbCt6fN+rC+6tMGS99LaxQ==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/mdast-util-mdx-jsx/node_modules/is-alphanumerical": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-alphanumerical/-/is-alphanumerical-2.0.1.tgz", + "integrity": 
"sha512-hmbYhX/9MUMF5uh7tOXyK/n0ZvWpad5caBA17GsC6vyuCqaWliRG5K1qS9inmUhEMaOBIW7/whAnSwveW/LtZw==", + "dependencies": { + "is-alphabetical": "^2.0.0", + "is-decimal": "^2.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/mdast-util-mdx-jsx/node_modules/is-decimal": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-decimal/-/is-decimal-2.0.1.tgz", + "integrity": "sha512-AAB9hiomQs5DXWcRB1rqsxGUstbRroFOPPVAomNk/3XHR5JyEZChOyTWe2oayKnsSsr/kcGqF+z6yuH6HHpN0A==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/mdast-util-mdx-jsx/node_modules/is-hexadecimal": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-hexadecimal/-/is-hexadecimal-2.0.1.tgz", + "integrity": "sha512-DgZQp241c8oO6cA1SbTEWiXeoxV42vlcJxgH+B3hi1AiqqKruZR3ZGF8In3fj4+/y/7rHvlOZLZtgJ/4ttYGZg==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/mdast-util-mdx-jsx/node_modules/longest-streak": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/longest-streak/-/longest-streak-3.1.0.tgz", + "integrity": "sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/mdast-util-mdx-jsx/node_modules/mdast-util-to-markdown": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-1.5.0.tgz", + "integrity": "sha512-bbv7TPv/WC49thZPg3jXuqzuvI45IL2EVAr/KxF0BSdHsU0ceFHOmwQn6evxAh1GaoK/6GQ1wp4R4oW2+LFL/A==", + "dependencies": { + "@types/mdast": "^3.0.0", + "@types/unist": "^2.0.0", + "longest-streak": "^3.0.0", + "mdast-util-phrasing": "^3.0.0", + "mdast-util-to-string": "^3.0.0", + "micromark-util-decode-string": "^1.0.0", + "unist-util-visit": "^4.0.0", + "zwitch": "^2.0.0" + }, + "funding": 
{ + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx-jsx/node_modules/mdast-util-to-string": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-3.2.0.tgz", + "integrity": "sha512-V4Zn/ncyN1QNSqSBxTrMOLpjr+IKdHl2v3KVLoWmDPscP4r9GcCi71gjgvUV1SFSKh92AjAG4peFuBl2/YgCJg==", + "dependencies": { + "@types/mdast": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx-jsx/node_modules/parse-entities": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/parse-entities/-/parse-entities-4.0.1.tgz", + "integrity": "sha512-SWzvYcSJh4d/SGLIOQfZ/CoNv6BTlI6YEQ7Nj82oDVnRpwe/Z/F1EMx42x3JAOwGBlCjeCH0BRJQbQ/opHL17w==", + "dependencies": { + "@types/unist": "^2.0.0", + "character-entities": "^2.0.0", + "character-entities-legacy": "^3.0.0", + "character-reference-invalid": "^2.0.0", + "decode-named-character-reference": "^1.0.0", + "is-alphanumerical": "^2.0.0", + "is-decimal": "^2.0.0", + "is-hexadecimal": "^2.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/mdast-util-mdx-jsx/node_modules/unist-util-is": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-5.2.1.tgz", + "integrity": "sha512-u9njyyfEh43npf1M+yGKDGVPbY/JWEemg5nH05ncKPfi+kBbKBJoTdsogMu33uhytuLlv9y0O7GH7fEdwLdLQw==", + "dependencies": { + "@types/unist": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx-jsx/node_modules/unist-util-remove-position": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/unist-util-remove-position/-/unist-util-remove-position-4.0.2.tgz", + "integrity": "sha512-TkBb0HABNmxzAcfLf4qsIbFbaPDvMO6wa3b3j4VcEzFVaw1LBKwnW4/sRJ/atSLSzoIg41JWEdnE7N6DIhGDGQ==", + 
"dependencies": { + "@types/unist": "^2.0.0", + "unist-util-visit": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx-jsx/node_modules/unist-util-visit": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-4.1.2.tgz", + "integrity": "sha512-MSd8OUGISqHdVvfY9TPhyK2VdUrPgxkUtWSuMHF6XAAFuL4LokseigBnZtPnJMu+FbynTkFNnFlyjxpVKujMRg==", + "dependencies": { + "@types/unist": "^2.0.0", + "unist-util-is": "^5.0.0", + "unist-util-visit-parents": "^5.1.1" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx-jsx/node_modules/unist-util-visit-parents": { + "version": "5.1.3", + "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-5.1.3.tgz", + "integrity": "sha512-x6+y8g7wWMyQhL1iZfhIPhDAs7Xwbn9nRosDXl7qoPTSCy0yNxnKc+hWokFifWQIDGi154rdUqKvbCa4+1kLhg==", + "dependencies": { + "@types/unist": "^2.0.0", + "unist-util-is": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx-jsx/node_modules/vfile-message": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-3.1.4.tgz", + "integrity": "sha512-fa0Z6P8HUrQN4BZaX05SIVXic+7kE3b05PWAtPuYP9QLHsLKYR7/AlLW3NtOrpXRLeawpDLMsVkmk5DG0NXgWw==", + "dependencies": { + "@types/unist": "^2.0.0", + "unist-util-stringify-position": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx-jsx/node_modules/zwitch": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-2.0.4.tgz", + "integrity": "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==", + "funding": { + "type": "github", + "url": 
"https://github.com/sponsors/wooorm" + } + }, + "node_modules/mdast-util-mdx/node_modules/longest-streak": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/longest-streak/-/longest-streak-3.1.0.tgz", + "integrity": "sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/mdast-util-mdx/node_modules/mdast-util-to-markdown": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-1.5.0.tgz", + "integrity": "sha512-bbv7TPv/WC49thZPg3jXuqzuvI45IL2EVAr/KxF0BSdHsU0ceFHOmwQn6evxAh1GaoK/6GQ1wp4R4oW2+LFL/A==", + "dependencies": { + "@types/mdast": "^3.0.0", + "@types/unist": "^2.0.0", + "longest-streak": "^3.0.0", + "mdast-util-phrasing": "^3.0.0", + "mdast-util-to-string": "^3.0.0", + "micromark-util-decode-string": "^1.0.0", + "unist-util-visit": "^4.0.0", + "zwitch": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx/node_modules/mdast-util-to-string": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-3.2.0.tgz", + "integrity": "sha512-V4Zn/ncyN1QNSqSBxTrMOLpjr+IKdHl2v3KVLoWmDPscP4r9GcCi71gjgvUV1SFSKh92AjAG4peFuBl2/YgCJg==", + "dependencies": { + "@types/mdast": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx/node_modules/unist-util-is": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-5.2.1.tgz", + "integrity": "sha512-u9njyyfEh43npf1M+yGKDGVPbY/JWEemg5nH05ncKPfi+kBbKBJoTdsogMu33uhytuLlv9y0O7GH7fEdwLdLQw==", + "dependencies": { + "@types/unist": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + 
"node_modules/mdast-util-mdx/node_modules/unist-util-visit": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-4.1.2.tgz", + "integrity": "sha512-MSd8OUGISqHdVvfY9TPhyK2VdUrPgxkUtWSuMHF6XAAFuL4LokseigBnZtPnJMu+FbynTkFNnFlyjxpVKujMRg==", + "dependencies": { + "@types/unist": "^2.0.0", + "unist-util-is": "^5.0.0", + "unist-util-visit-parents": "^5.1.1" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx/node_modules/unist-util-visit-parents": { + "version": "5.1.3", + "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-5.1.3.tgz", + "integrity": "sha512-x6+y8g7wWMyQhL1iZfhIPhDAs7Xwbn9nRosDXl7qoPTSCy0yNxnKc+hWokFifWQIDGi154rdUqKvbCa4+1kLhg==", + "dependencies": { + "@types/unist": "^2.0.0", + "unist-util-is": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx/node_modules/zwitch": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-2.0.4.tgz", + "integrity": "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/mdast-util-mdxjs-esm": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/mdast-util-mdxjs-esm/-/mdast-util-mdxjs-esm-1.3.1.tgz", + "integrity": "sha512-SXqglS0HrEvSdUEfoXFtcg7DRl7S2cwOXc7jkuusG472Mmjag34DUDeOJUZtl+BVnyeO1frIgVpHlNRWc2gk/w==", + "dependencies": { + "@types/estree-jsx": "^1.0.0", + "@types/hast": "^2.0.0", + "@types/mdast": "^3.0.0", + "mdast-util-from-markdown": "^1.0.0", + "mdast-util-to-markdown": "^1.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdxjs-esm/node_modules/longest-streak": { + "version": 
"3.1.0", + "resolved": "https://registry.npmjs.org/longest-streak/-/longest-streak-3.1.0.tgz", + "integrity": "sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/mdast-util-mdxjs-esm/node_modules/mdast-util-to-markdown": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-1.5.0.tgz", + "integrity": "sha512-bbv7TPv/WC49thZPg3jXuqzuvI45IL2EVAr/KxF0BSdHsU0ceFHOmwQn6evxAh1GaoK/6GQ1wp4R4oW2+LFL/A==", + "dependencies": { + "@types/mdast": "^3.0.0", + "@types/unist": "^2.0.0", + "longest-streak": "^3.0.0", + "mdast-util-phrasing": "^3.0.0", + "mdast-util-to-string": "^3.0.0", + "micromark-util-decode-string": "^1.0.0", + "unist-util-visit": "^4.0.0", + "zwitch": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdxjs-esm/node_modules/mdast-util-to-string": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-3.2.0.tgz", + "integrity": "sha512-V4Zn/ncyN1QNSqSBxTrMOLpjr+IKdHl2v3KVLoWmDPscP4r9GcCi71gjgvUV1SFSKh92AjAG4peFuBl2/YgCJg==", + "dependencies": { + "@types/mdast": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdxjs-esm/node_modules/unist-util-is": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-5.2.1.tgz", + "integrity": "sha512-u9njyyfEh43npf1M+yGKDGVPbY/JWEemg5nH05ncKPfi+kBbKBJoTdsogMu33uhytuLlv9y0O7GH7fEdwLdLQw==", + "dependencies": { + "@types/unist": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdxjs-esm/node_modules/unist-util-visit": { + "version": "4.1.2", + "resolved": 
"https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-4.1.2.tgz", + "integrity": "sha512-MSd8OUGISqHdVvfY9TPhyK2VdUrPgxkUtWSuMHF6XAAFuL4LokseigBnZtPnJMu+FbynTkFNnFlyjxpVKujMRg==", + "dependencies": { + "@types/unist": "^2.0.0", + "unist-util-is": "^5.0.0", + "unist-util-visit-parents": "^5.1.1" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdxjs-esm/node_modules/unist-util-visit-parents": { + "version": "5.1.3", + "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-5.1.3.tgz", + "integrity": "sha512-x6+y8g7wWMyQhL1iZfhIPhDAs7Xwbn9nRosDXl7qoPTSCy0yNxnKc+hWokFifWQIDGi154rdUqKvbCa4+1kLhg==", + "dependencies": { + "@types/unist": "^2.0.0", + "unist-util-is": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdxjs-esm/node_modules/zwitch": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-2.0.4.tgz", + "integrity": "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/mdast-util-phrasing": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mdast-util-phrasing/-/mdast-util-phrasing-3.0.1.tgz", + "integrity": "sha512-WmI1gTXUBJo4/ZmSk79Wcb2HcjPJBzM1nlI/OUWA8yk2X9ik3ffNbBGsU+09BFmXaL1IBb9fiuvq6/KMiNycSg==", + "dependencies": { + "@types/mdast": "^3.0.0", + "unist-util-is": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-phrasing/node_modules/unist-util-is": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-5.2.1.tgz", + "integrity": 
"sha512-u9njyyfEh43npf1M+yGKDGVPbY/JWEemg5nH05ncKPfi+kBbKBJoTdsogMu33uhytuLlv9y0O7GH7fEdwLdLQw==", + "dependencies": { + "@types/unist": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-to-hast": { + "version": "10.0.1", + "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-10.0.1.tgz", + "integrity": "sha512-BW3LM9SEMnjf4HXXVApZMt8gLQWVNXc3jryK0nJu/rOXPOnlkUjmdkDlmxMirpbU9ILncGFIwLH/ubnWBbcdgA==", + "dependencies": { + "@types/mdast": "^3.0.0", + "@types/unist": "^2.0.0", + "mdast-util-definitions": "^4.0.0", + "mdurl": "^1.0.0", + "unist-builder": "^2.0.0", + "unist-util-generated": "^1.0.0", + "unist-util-position": "^3.0.0", + "unist-util-visit": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-to-markdown": { + "version": "0.6.5", + "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-0.6.5.tgz", + "integrity": "sha512-XeV9sDE7ZlOQvs45C9UKMtfTcctcaj/pGwH8YLbMHoMOXNNCn2LsqVQOqrF1+/NU8lKDAqozme9SCXWyo9oAcQ==", + "dependencies": { + "@types/unist": "^2.0.0", + "longest-streak": "^2.0.0", + "mdast-util-to-string": "^2.0.0", + "parse-entities": "^2.0.0", + "repeat-string": "^1.0.0", + "zwitch": "^1.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-to-string": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-2.0.0.tgz", + "integrity": "sha512-AW4DRS3QbBayY/jJmD8437V1Gombjf8RSOUCMFBuo5iHi58AGEgVCKQ+ezHkZZDpAQS75hcBMpLqjpJTjtUL7w==", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdn-data": { + "version": "2.0.14", + "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.14.tgz", + "integrity": 
"sha512-dn6wd0uw5GsdswPFfsgMp5NSB0/aDe6fK94YJV/AJDYXL6HVLWBsxeq7js7Ad+mU2K9LAlwpk6kN2D5mwCPVow==" + }, + "node_modules/mdurl": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-1.0.1.tgz", + "integrity": "sha512-/sKlQJCBYVY9Ers9hqzKou4H6V5UWc/M59TH2dvkt+84itfnq7uFOMLpOiOS4ujvHP4etln18fmIxA5R5fll0g==" + }, + "node_modules/media-typer": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", + "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/medium-zoom": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/medium-zoom/-/medium-zoom-1.0.8.tgz", + "integrity": "sha512-CjFVuFq/IfrdqesAXfg+hzlDKu6A2n80ZIq0Kl9kWjoHh9j1N9Uvk5X0/MmN0hOfm5F9YBswlClhcwnmtwz7gA==" + }, + "node_modules/memfs": { + "version": "3.5.3", + "resolved": "https://registry.npmjs.org/memfs/-/memfs-3.5.3.tgz", + "integrity": "sha512-UERzLsxzllchadvbPs5aolHh65ISpKpM+ccLbOJ8/vvpBKmAWf+la7dXFy7Mr0ySHbdHrFv5kGFCUHHe6GFEmw==", + "dependencies": { + "fs-monkey": "^1.0.4" + }, + "engines": { + "node": ">= 4.0.0" + } + }, + "node_modules/memoize-one": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/memoize-one/-/memoize-one-5.2.1.tgz", + "integrity": "sha512-zYiwtZUcYyXKo/np96AGZAckk+FWWsUdJ3cHGGmld7+AhvcWmQyGCYUh1hc4Q/pkOhb65dQR/pqCyK0cOaHz4Q==" + }, + "node_modules/meow": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/meow/-/meow-9.0.0.tgz", + "integrity": "sha512-+obSblOQmRhcyBt62furQqRAQpNyWXo8BuQ5bN7dG8wmwQ+vwHKp/rCFD4CrTP8CsDQD1sjoZ94K417XEUk8IQ==", + "dev": true, + "dependencies": { + "@types/minimist": "^1.2.0", + "camelcase-keys": "^6.2.2", + "decamelize": "^1.2.0", + "decamelize-keys": "^1.1.0", + "hard-rejection": "^2.1.0", + "minimist-options": "4.1.0", + "normalize-package-data": "^3.0.0", + "read-pkg-up": "^7.0.1", + "redent": "^3.0.0", + 
"trim-newlines": "^3.0.0", + "type-fest": "^0.18.0", + "yargs-parser": "^20.2.3" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/meow/node_modules/type-fest": { + "version": "0.18.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.18.1.tgz", + "integrity": "sha512-OIAYXk8+ISY+qTOwkHtKqzAuxchoMiD9Udx+FSGQDuiRR+PJKJHc2NJAXlbhkGwTt/4/nKZxELY1w3ReWOL8mw==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/merge-descriptors": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", + "integrity": "sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w==" + }, + "node_modules/merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==" + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "engines": { + "node": ">= 8" + } + }, + "node_modules/methods": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", + "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/micromark": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/micromark/-/micromark-3.2.0.tgz", + "integrity": "sha512-uD66tJj54JLYq0De10AhWycZWGQNUvDI55xPgk2sQM5kn1JYlhbCMTtEeT27+vAhW2FBQxLlOmS3pmA7/2z4aA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": 
"https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "@types/debug": "^4.0.0", + "debug": "^4.0.0", + "decode-named-character-reference": "^1.0.0", + "micromark-core-commonmark": "^1.0.1", + "micromark-factory-space": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-chunked": "^1.0.0", + "micromark-util-combine-extensions": "^1.0.0", + "micromark-util-decode-numeric-character-reference": "^1.0.0", + "micromark-util-encode": "^1.0.0", + "micromark-util-normalize-identifier": "^1.0.0", + "micromark-util-resolve-all": "^1.0.0", + "micromark-util-sanitize-uri": "^1.0.0", + "micromark-util-subtokenize": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.1", + "uvu": "^0.5.0" + } + }, + "node_modules/micromark-core-commonmark": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-core-commonmark/-/micromark-core-commonmark-1.1.0.tgz", + "integrity": "sha512-BgHO1aRbolh2hcrzL2d1La37V0Aoz73ymF8rAcKnohLy93titmv62E0gP8Hrx9PKcKrqCZ1BbLGbP3bEhoXYlw==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "decode-named-character-reference": "^1.0.0", + "micromark-factory-destination": "^1.0.0", + "micromark-factory-label": "^1.0.0", + "micromark-factory-space": "^1.0.0", + "micromark-factory-title": "^1.0.0", + "micromark-factory-whitespace": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-chunked": "^1.0.0", + "micromark-util-classify-character": "^1.0.0", + "micromark-util-html-tag-name": "^1.0.0", + "micromark-util-normalize-identifier": "^1.0.0", + "micromark-util-resolve-all": "^1.0.0", + "micromark-util-subtokenize": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.1", + "uvu": "^0.5.0" + } + }, + 
"node_modules/micromark-extension-gfm": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm/-/micromark-extension-gfm-0.3.3.tgz", + "integrity": "sha512-oVN4zv5/tAIA+l3GbMi7lWeYpJ14oQyJ3uEim20ktYFAcfX1x3LNlFGGlmrZHt7u9YlKExmyJdDGaTt6cMSR/A==", + "dependencies": { + "micromark": "~2.11.0", + "micromark-extension-gfm-autolink-literal": "~0.5.0", + "micromark-extension-gfm-strikethrough": "~0.6.5", + "micromark-extension-gfm-table": "~0.4.0", + "micromark-extension-gfm-tagfilter": "~0.3.0", + "micromark-extension-gfm-task-list-item": "~0.3.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-autolink-literal": { + "version": "0.5.7", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-autolink-literal/-/micromark-extension-gfm-autolink-literal-0.5.7.tgz", + "integrity": "sha512-ePiDGH0/lhcngCe8FtH4ARFoxKTUelMp4L7Gg2pujYD5CSMb9PbblnyL+AAMud/SNMyusbS2XDSiPIRcQoNFAw==", + "dependencies": { + "micromark": "~2.11.3" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-autolink-literal/node_modules/micromark": { + "version": "2.11.4", + "resolved": "https://registry.npmjs.org/micromark/-/micromark-2.11.4.tgz", + "integrity": "sha512-+WoovN/ppKolQOFIAajxi7Lu9kInbPxFuTBVEavFcL8eAfVstoc5MocPmqBeAdBOJV00uaVjegzH4+MA0DN/uA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "debug": "^4.0.0", + "parse-entities": "^2.0.0" + } + }, + "node_modules/micromark-extension-gfm-strikethrough": { + "version": "0.6.5", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-strikethrough/-/micromark-extension-gfm-strikethrough-0.6.5.tgz", + "integrity": 
"sha512-PpOKlgokpQRwUesRwWEp+fHjGGkZEejj83k9gU5iXCbDG+XBA92BqnRKYJdfqfkrRcZRgGuPuXb7DaK/DmxOhw==", + "dependencies": { + "micromark": "~2.11.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-strikethrough/node_modules/micromark": { + "version": "2.11.4", + "resolved": "https://registry.npmjs.org/micromark/-/micromark-2.11.4.tgz", + "integrity": "sha512-+WoovN/ppKolQOFIAajxi7Lu9kInbPxFuTBVEavFcL8eAfVstoc5MocPmqBeAdBOJV00uaVjegzH4+MA0DN/uA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "debug": "^4.0.0", + "parse-entities": "^2.0.0" } }, - "node_modules/meow/node_modules/type-fest": { - "version": "0.18.1", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.18.1.tgz", - "integrity": "sha512-OIAYXk8+ISY+qTOwkHtKqzAuxchoMiD9Udx+FSGQDuiRR+PJKJHc2NJAXlbhkGwTt/4/nKZxELY1w3ReWOL8mw==", - "dev": true, - "engines": { - "node": ">=10" + "node_modules/micromark-extension-gfm-table": { + "version": "0.4.3", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-table/-/micromark-extension-gfm-table-0.4.3.tgz", + "integrity": "sha512-hVGvESPq0fk6ALWtomcwmgLvH8ZSVpcPjzi0AjPclB9FsVRgMtGZkUcpE0zgjOCFAznKepF4z3hX8z6e3HODdA==", + "dependencies": { + "micromark": "~2.11.0" }, "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/merge-descriptors": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", - "integrity": "sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w==" + "node_modules/micromark-extension-gfm-table/node_modules/micromark": { + "version": "2.11.4", + "resolved": 
"https://registry.npmjs.org/micromark/-/micromark-2.11.4.tgz", + "integrity": "sha512-+WoovN/ppKolQOFIAajxi7Lu9kInbPxFuTBVEavFcL8eAfVstoc5MocPmqBeAdBOJV00uaVjegzH4+MA0DN/uA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "debug": "^4.0.0", + "parse-entities": "^2.0.0" + } }, - "node_modules/merge-stream": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", - "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==" + "node_modules/micromark-extension-gfm-tagfilter": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-tagfilter/-/micromark-extension-gfm-tagfilter-0.3.0.tgz", + "integrity": "sha512-9GU0xBatryXifL//FJH+tAZ6i240xQuFrSL7mYi8f4oZSbc+NvXjkrHemeYP0+L4ZUT+Ptz3b95zhUZnMtoi/Q==", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } }, - "node_modules/merge2": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", - "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", - "engines": { - "node": ">= 8" + "node_modules/micromark-extension-gfm-task-list-item": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-task-list-item/-/micromark-extension-gfm-task-list-item-0.3.3.tgz", + "integrity": "sha512-0zvM5iSLKrc/NQl84pZSjGo66aTGd57C1idmlWmE87lkMcXrTxg1uXa/nXomxJytoje9trP0NDLvw4bZ/Z/XCQ==", + "dependencies": { + "micromark": "~2.11.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/methods": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", - "integrity": 
"sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==", - "engines": { - "node": ">= 0.6" + "node_modules/micromark-extension-gfm-task-list-item/node_modules/micromark": { + "version": "2.11.4", + "resolved": "https://registry.npmjs.org/micromark/-/micromark-2.11.4.tgz", + "integrity": "sha512-+WoovN/ppKolQOFIAajxi7Lu9kInbPxFuTBVEavFcL8eAfVstoc5MocPmqBeAdBOJV00uaVjegzH4+MA0DN/uA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "debug": "^4.0.0", + "parse-entities": "^2.0.0" } }, - "node_modules/micromark": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/micromark/-/micromark-3.2.0.tgz", - "integrity": "sha512-uD66tJj54JLYq0De10AhWycZWGQNUvDI55xPgk2sQM5kn1JYlhbCMTtEeT27+vAhW2FBQxLlOmS3pmA7/2z4aA==", + "node_modules/micromark-extension-gfm/node_modules/micromark": { + "version": "2.11.4", + "resolved": "https://registry.npmjs.org/micromark/-/micromark-2.11.4.tgz", + "integrity": "sha512-+WoovN/ppKolQOFIAajxi7Lu9kInbPxFuTBVEavFcL8eAfVstoc5MocPmqBeAdBOJV00uaVjegzH4+MA0DN/uA==", "funding": [ { "type": "GitHub Sponsors", @@ -10274,29 +11813,14 @@ } ], "dependencies": { - "@types/debug": "^4.0.0", "debug": "^4.0.0", - "decode-named-character-reference": "^1.0.0", - "micromark-core-commonmark": "^1.0.1", - "micromark-factory-space": "^1.0.0", - "micromark-util-character": "^1.0.0", - "micromark-util-chunked": "^1.0.0", - "micromark-util-combine-extensions": "^1.0.0", - "micromark-util-decode-numeric-character-reference": "^1.0.0", - "micromark-util-encode": "^1.0.0", - "micromark-util-normalize-identifier": "^1.0.0", - "micromark-util-resolve-all": "^1.0.0", - "micromark-util-sanitize-uri": "^1.0.0", - "micromark-util-subtokenize": "^1.0.0", - "micromark-util-symbol": "^1.0.0", - "micromark-util-types": "^1.0.1", - "uvu": "^0.5.0" + "parse-entities": 
"^2.0.0" } }, - "node_modules/micromark-core-commonmark": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/micromark-core-commonmark/-/micromark-core-commonmark-1.1.0.tgz", - "integrity": "sha512-BgHO1aRbolh2hcrzL2d1La37V0Aoz73ymF8rAcKnohLy93titmv62E0gP8Hrx9PKcKrqCZ1BbLGbP3bEhoXYlw==", + "node_modules/micromark-extension-mdx-expression": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/micromark-extension-mdx-expression/-/micromark-extension-mdx-expression-1.0.8.tgz", + "integrity": "sha512-zZpeQtc5wfWKdzDsHRBY003H2Smg+PUi2REhqgIhdzAa5xonhP03FcXxqFSerFiNUr5AWmHpaNPQTBVOS4lrXw==", "funding": [ { "type": "GitHub Sponsors", @@ -10308,24 +11832,114 @@ } ], "dependencies": { - "decode-named-character-reference": "^1.0.0", - "micromark-factory-destination": "^1.0.0", - "micromark-factory-label": "^1.0.0", + "@types/estree": "^1.0.0", + "micromark-factory-mdx-expression": "^1.0.0", "micromark-factory-space": "^1.0.0", - "micromark-factory-title": "^1.0.0", - "micromark-factory-whitespace": "^1.0.0", "micromark-util-character": "^1.0.0", - "micromark-util-chunked": "^1.0.0", - "micromark-util-classify-character": "^1.0.0", - "micromark-util-html-tag-name": "^1.0.0", - "micromark-util-normalize-identifier": "^1.0.0", - "micromark-util-resolve-all": "^1.0.0", - "micromark-util-subtokenize": "^1.0.0", + "micromark-util-events-to-acorn": "^1.0.0", "micromark-util-symbol": "^1.0.0", - "micromark-util-types": "^1.0.1", + "micromark-util-types": "^1.0.0", "uvu": "^0.5.0" } }, + "node_modules/micromark-extension-mdx-jsx": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/micromark-extension-mdx-jsx/-/micromark-extension-mdx-jsx-1.0.5.tgz", + "integrity": "sha512-gPH+9ZdmDflbu19Xkb8+gheqEDqkSpdCEubQyxuz/Hn8DOXiXvrXeikOoBA71+e8Pfi0/UYmU3wW3H58kr7akA==", + "dependencies": { + "@types/acorn": "^4.0.0", + "@types/estree": "^1.0.0", + "estree-util-is-identifier-name": "^2.0.0", + "micromark-factory-mdx-expression": "^1.0.0", + 
"micromark-factory-space": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0", + "uvu": "^0.5.0", + "vfile-message": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-mdx-jsx/node_modules/vfile-message": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-3.1.4.tgz", + "integrity": "sha512-fa0Z6P8HUrQN4BZaX05SIVXic+7kE3b05PWAtPuYP9QLHsLKYR7/AlLW3NtOrpXRLeawpDLMsVkmk5DG0NXgWw==", + "dependencies": { + "@types/unist": "^2.0.0", + "unist-util-stringify-position": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-mdx-md": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/micromark-extension-mdx-md/-/micromark-extension-mdx-md-1.0.1.tgz", + "integrity": "sha512-7MSuj2S7xjOQXAjjkbjBsHkMtb+mDGVW6uI2dBL9snOBCbZmoNgDAeZ0nSn9j3T42UE/g2xVNMn18PJxZvkBEA==", + "dependencies": { + "micromark-util-types": "^1.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-mdxjs": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/micromark-extension-mdxjs/-/micromark-extension-mdxjs-1.0.1.tgz", + "integrity": "sha512-7YA7hF6i5eKOfFUzZ+0z6avRG52GpWR8DL+kN47y3f2KhxbBZMhmxe7auOeaTBrW2DenbbZTf1ea9tA2hDpC2Q==", + "dependencies": { + "acorn": "^8.0.0", + "acorn-jsx": "^5.0.0", + "micromark-extension-mdx-expression": "^1.0.0", + "micromark-extension-mdx-jsx": "^1.0.0", + "micromark-extension-mdx-md": "^1.0.0", + "micromark-extension-mdxjs-esm": "^1.0.0", + "micromark-util-combine-extensions": "^1.0.0", + "micromark-util-types": "^1.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + 
"node_modules/micromark-extension-mdxjs-esm": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/micromark-extension-mdxjs-esm/-/micromark-extension-mdxjs-esm-1.0.5.tgz", + "integrity": "sha512-xNRBw4aoURcyz/S69B19WnZAkWJMxHMT5hE36GtDAyhoyn/8TuAeqjFJQlwk+MKQsUD7b3l7kFX+vlfVWgcX1w==", + "dependencies": { + "@types/estree": "^1.0.0", + "micromark-core-commonmark": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-events-to-acorn": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0", + "unist-util-position-from-estree": "^1.1.0", + "uvu": "^0.5.0", + "vfile-message": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-mdxjs-esm/node_modules/vfile-message": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-3.1.4.tgz", + "integrity": "sha512-fa0Z6P8HUrQN4BZaX05SIVXic+7kE3b05PWAtPuYP9QLHsLKYR7/AlLW3NtOrpXRLeawpDLMsVkmk5DG0NXgWw==", + "dependencies": { + "@types/unist": "^2.0.0", + "unist-util-stringify-position": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/micromark-factory-destination": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/micromark-factory-destination/-/micromark-factory-destination-1.1.0.tgz", @@ -10367,6 +11981,44 @@ "uvu": "^0.5.0" } }, + "node_modules/micromark-factory-mdx-expression": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/micromark-factory-mdx-expression/-/micromark-factory-mdx-expression-1.0.9.tgz", + "integrity": "sha512-jGIWzSmNfdnkJq05c7b0+Wv0Kfz3NJ3N4cBjnbO4zjXIlxJr+f8lk+5ZmwFvqdAbUy2q6B5rCY//g0QAAaXDWA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + 
"@types/estree": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-events-to-acorn": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0", + "unist-util-position-from-estree": "^1.0.0", + "uvu": "^0.5.0", + "vfile-message": "^3.0.0" + } + }, + "node_modules/micromark-factory-mdx-expression/node_modules/vfile-message": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-3.1.4.tgz", + "integrity": "sha512-fa0Z6P8HUrQN4BZaX05SIVXic+7kE3b05PWAtPuYP9QLHsLKYR7/AlLW3NtOrpXRLeawpDLMsVkmk5DG0NXgWw==", + "dependencies": { + "@types/unist": "^2.0.0", + "unist-util-stringify-position": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/micromark-factory-space": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-1.1.0.tgz", @@ -10543,21 +12195,59 @@ "micromark-util-symbol": "^1.0.0" } }, - "node_modules/micromark-util-encode": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-1.1.0.tgz", - "integrity": "sha512-EuEzTWSTAj9PA5GOAs992GzNh2dGQO52UvAbtSOMvXTxv3Criqb6IOzJUBCmEqrrXSblJIJBbFFv6zPxpreiJw==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ] - }, + "node_modules/micromark-util-encode": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-1.1.0.tgz", + "integrity": "sha512-EuEzTWSTAj9PA5GOAs992GzNh2dGQO52UvAbtSOMvXTxv3Criqb6IOzJUBCmEqrrXSblJIJBbFFv6zPxpreiJw==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ] + }, + 
"node_modules/micromark-util-events-to-acorn": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/micromark-util-events-to-acorn/-/micromark-util-events-to-acorn-1.2.3.tgz", + "integrity": "sha512-ij4X7Wuc4fED6UoLWkmo0xJQhsktfNh1J0m8g4PbIMPlx+ek/4YdW5mvbye8z/aZvAPUoxgXHrwVlXAPKMRp1w==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "@types/acorn": "^4.0.0", + "@types/estree": "^1.0.0", + "@types/unist": "^2.0.0", + "estree-util-visit": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0", + "uvu": "^0.5.0", + "vfile-message": "^3.0.0" + } + }, + "node_modules/micromark-util-events-to-acorn/node_modules/vfile-message": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-3.1.4.tgz", + "integrity": "sha512-fa0Z6P8HUrQN4BZaX05SIVXic+7kE3b05PWAtPuYP9QLHsLKYR7/AlLW3NtOrpXRLeawpDLMsVkmk5DG0NXgWw==", + "dependencies": { + "@types/unist": "^2.0.0", + "unist-util-stringify-position": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/micromark-util-html-tag-name": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/micromark-util-html-tag-name/-/micromark-util-html-tag-name-1.2.0.tgz", @@ -12117,6 +13807,16 @@ "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-0.2.0.tgz", "integrity": "sha512-YHk5ez1hmMR5LOkb9iJkLKqoBlL7WD5M8ljC75ZfzXriuBIVNuecaXuU7e+hOwyqf24Wxhh7Vxgt7Hnw9288Tg==" }, + "node_modules/periscopic": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/periscopic/-/periscopic-3.1.0.tgz", + "integrity": "sha512-vKiQ8RRtkl9P+r/+oefh25C3fhybptkHKCZSPlcXiJux2tJF55GnEj3BVn4A5gKfq9NWWXXrxkHBwVPUfH0opw==", + "dependencies": { + "@types/estree": "^1.0.0", + "estree-walker": "^3.0.0", + "is-reference": 
"^3.0.0" + } + }, "node_modules/picocolors": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", @@ -13957,6 +15657,56 @@ "jsesc": "bin/jsesc" } }, + "node_modules/rehype-parse": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/rehype-parse/-/rehype-parse-6.0.2.tgz", + "integrity": "sha512-0S3CpvpTAgGmnz8kiCyFLGuW5yA4OQhyNTm/nwPopZ7+PI11WnGl1TTWTGv/2hPEe/g2jRLlhVVSsoDH8waRug==", + "dependencies": { + "hast-util-from-parse5": "^5.0.0", + "parse5": "^5.0.0", + "xtend": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/rehype-parse/node_modules/hast-util-from-parse5": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/hast-util-from-parse5/-/hast-util-from-parse5-5.0.3.tgz", + "integrity": "sha512-gOc8UB99F6eWVWFtM9jUikjN7QkWxB3nY0df5Z0Zq1/Nkwl5V4hAAsl0tmwlgWl/1shlTF8DnNYLO8X6wRV9pA==", + "dependencies": { + "ccount": "^1.0.3", + "hastscript": "^5.0.0", + "property-information": "^5.0.0", + "web-namespaces": "^1.1.2", + "xtend": "^4.0.1" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/rehype-parse/node_modules/hastscript": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/hastscript/-/hastscript-5.1.2.tgz", + "integrity": "sha512-WlztFuK+Lrvi3EggsqOkQ52rKbxkXL3RwB6t5lwoa8QLMemoWfBuL43eDrwOamJyR7uKQKdmKYaBH1NZBiIRrQ==", + "dependencies": { + "comma-separated-tokens": "^1.0.0", + "hast-util-parse-selector": "^2.0.0", + "property-information": "^5.0.0", + "space-separated-tokens": "^1.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/rehype-parse/node_modules/parse5": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-5.1.1.tgz", + "integrity": 
"sha512-ugq4DFI0Ptb+WWjAdOK16+u/nHfiIrcE+sh8kZMaM0WllQKLI9rOUq6c2b7cwPkXdzfQESqvoqK6ug7U/Yyzug==" + }, "node_modules/relateurl": { "version": "0.2.7", "resolved": "https://registry.npmjs.org/relateurl/-/relateurl-0.2.7.tgz", @@ -13965,6 +15715,40 @@ "node": ">= 0.10" } }, + "node_modules/remark-admonitions": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/remark-admonitions/-/remark-admonitions-1.2.1.tgz", + "integrity": "sha512-Ji6p68VDvD+H1oS95Fdx9Ar5WA2wcDA4kwrrhVU7fGctC6+d3uiMICu7w7/2Xld+lnU7/gi+432+rRbup5S8ow==", + "dependencies": { + "rehype-parse": "^6.0.2", + "unified": "^8.4.2", + "unist-util-visit": "^2.0.1" + } + }, + "node_modules/remark-admonitions/node_modules/is-plain-obj": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz", + "integrity": "sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==", + "engines": { + "node": ">=8" + } + }, + "node_modules/remark-admonitions/node_modules/unified": { + "version": "8.4.2", + "resolved": "https://registry.npmjs.org/unified/-/unified-8.4.2.tgz", + "integrity": "sha512-JCrmN13jI4+h9UAyKEoGcDZV+i1E7BLFuG7OsaDvTXI5P0qhHX+vZO/kOhz9jn8HGENDKbwSeB0nVOg4gVStGA==", + "dependencies": { + "bail": "^1.0.0", + "extend": "^3.0.0", + "is-plain-obj": "^2.0.0", + "trough": "^1.0.0", + "vfile": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/remark-emoji": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/remark-emoji/-/remark-emoji-2.2.0.tgz", @@ -13984,6 +15768,19 @@ "url": "https://opencollective.com/unified" } }, + "node_modules/remark-gfm": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/remark-gfm/-/remark-gfm-1.0.0.tgz", + "integrity": "sha512-KfexHJCiqvrdBZVbQ6RopMZGwaXz6wFJEfByIuEwGf0arvITHjiKKZ1dpXujjH9KZdm1//XJQwgfnJ3lmXaDPA==", + "dependencies": { + "mdast-util-gfm": "^0.1.0", + 
"micromark-extension-gfm": "^0.3.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/remark-mdx": { "version": "1.6.22", "resolved": "https://registry.npmjs.org/remark-mdx/-/remark-mdx-1.6.22.tgz", @@ -14206,6 +16003,189 @@ "url": "https://opencollective.com/unified" } }, + "node_modules/remark-rehype": { + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/remark-rehype/-/remark-rehype-10.1.0.tgz", + "integrity": "sha512-EFmR5zppdBp0WQeDVZ/b66CWJipB2q2VLNFMabzDSGR66Z2fQii83G5gTBbgGEnEEA0QRussvrFHxk1HWGJskw==", + "dependencies": { + "@types/hast": "^2.0.0", + "@types/mdast": "^3.0.0", + "mdast-util-to-hast": "^12.1.0", + "unified": "^10.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-rehype/node_modules/bail": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/bail/-/bail-2.0.2.tgz", + "integrity": "sha512-0xO6mYd7JB2YesxDKplafRpsiOzPt9V02ddPCLbY1xYGPOX24NTyN50qnUxgCPcSoYMhKpAuBTjQoRZCAkUDRw==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/remark-rehype/node_modules/is-plain-obj": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-4.1.0.tgz", + "integrity": "sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/remark-rehype/node_modules/mdast-util-definitions": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/mdast-util-definitions/-/mdast-util-definitions-5.1.2.tgz", + "integrity": "sha512-8SVPMuHqlPME/z3gqVwWY4zVXn8lqKv/pAhC57FuJ40ImXyBpmO5ukh98zB2v7Blql2FiHjHv9LVztSIqjY+MA==", + "dependencies": { + "@types/mdast": "^3.0.0", + "@types/unist": "^2.0.0", + "unist-util-visit": "^4.0.0" + }, + 
"funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-rehype/node_modules/mdast-util-to-hast": { + "version": "12.3.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-12.3.0.tgz", + "integrity": "sha512-pits93r8PhnIoU4Vy9bjW39M2jJ6/tdHyja9rrot9uujkN7UTU9SDnE6WNJz/IGyQk3XHX6yNNtrBH6cQzm8Hw==", + "dependencies": { + "@types/hast": "^2.0.0", + "@types/mdast": "^3.0.0", + "mdast-util-definitions": "^5.0.0", + "micromark-util-sanitize-uri": "^1.1.0", + "trim-lines": "^3.0.0", + "unist-util-generated": "^2.0.0", + "unist-util-position": "^4.0.0", + "unist-util-visit": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-rehype/node_modules/trough": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/trough/-/trough-2.1.0.tgz", + "integrity": "sha512-AqTiAOLcj85xS7vQ8QkAV41hPDIJ71XJB4RCUrzo/1GM2CQwhkJGaf9Hgr7BOugMRpgGUrqRg/DrBDl4H40+8g==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/remark-rehype/node_modules/unified": { + "version": "10.1.2", + "resolved": "https://registry.npmjs.org/unified/-/unified-10.1.2.tgz", + "integrity": "sha512-pUSWAi/RAnVy1Pif2kAoeWNBa3JVrx0MId2LASj8G+7AiHWoKZNTomq6LG326T68U7/e263X6fTdcXIy7XnF7Q==", + "dependencies": { + "@types/unist": "^2.0.0", + "bail": "^2.0.0", + "extend": "^3.0.0", + "is-buffer": "^2.0.0", + "is-plain-obj": "^4.0.0", + "trough": "^2.0.0", + "vfile": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-rehype/node_modules/unist-util-generated": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/unist-util-generated/-/unist-util-generated-2.0.1.tgz", + "integrity": "sha512-qF72kLmPxAw0oN2fwpWIqbXAVyEqUzDHMsbtPvOudIlUzXYFIeQIuxXQCRCFh22B7cixvU0MG7m3MW8FTq/S+A==", + "funding": 
{ + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-rehype/node_modules/unist-util-is": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-5.2.1.tgz", + "integrity": "sha512-u9njyyfEh43npf1M+yGKDGVPbY/JWEemg5nH05ncKPfi+kBbKBJoTdsogMu33uhytuLlv9y0O7GH7fEdwLdLQw==", + "dependencies": { + "@types/unist": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-rehype/node_modules/unist-util-position": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/unist-util-position/-/unist-util-position-4.0.4.tgz", + "integrity": "sha512-kUBE91efOWfIVBo8xzh/uZQ7p9ffYRtUbMRZBNFYwf0RK8koUMx6dGUfwylLOKmaT2cs4wSW96QoYUSXAyEtpg==", + "dependencies": { + "@types/unist": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-rehype/node_modules/unist-util-visit": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-4.1.2.tgz", + "integrity": "sha512-MSd8OUGISqHdVvfY9TPhyK2VdUrPgxkUtWSuMHF6XAAFuL4LokseigBnZtPnJMu+FbynTkFNnFlyjxpVKujMRg==", + "dependencies": { + "@types/unist": "^2.0.0", + "unist-util-is": "^5.0.0", + "unist-util-visit-parents": "^5.1.1" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-rehype/node_modules/unist-util-visit-parents": { + "version": "5.1.3", + "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-5.1.3.tgz", + "integrity": "sha512-x6+y8g7wWMyQhL1iZfhIPhDAs7Xwbn9nRosDXl7qoPTSCy0yNxnKc+hWokFifWQIDGi154rdUqKvbCa4+1kLhg==", + "dependencies": { + "@types/unist": "^2.0.0", + "unist-util-is": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + 
"node_modules/remark-rehype/node_modules/vfile": { + "version": "5.3.7", + "resolved": "https://registry.npmjs.org/vfile/-/vfile-5.3.7.tgz", + "integrity": "sha512-r7qlzkgErKjobAmyNIkkSpizsFPYiUPuJb5pNW1RB4JcYVZhs4lIbVqk8XPk033CV/1z8ss5pkax8SuhGpcG8g==", + "dependencies": { + "@types/unist": "^2.0.0", + "is-buffer": "^2.0.0", + "unist-util-stringify-position": "^3.0.0", + "vfile-message": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-rehype/node_modules/vfile-message": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-3.1.4.tgz", + "integrity": "sha512-fa0Z6P8HUrQN4BZaX05SIVXic+7kE3b05PWAtPuYP9QLHsLKYR7/AlLW3NtOrpXRLeawpDLMsVkmk5DG0NXgWw==", + "dependencies": { + "@types/unist": "^2.0.0", + "unist-util-stringify-position": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/remark-squeeze-paragraphs": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/remark-squeeze-paragraphs/-/remark-squeeze-paragraphs-4.0.0.tgz", @@ -15445,6 +17425,28 @@ "url": "https://github.com/chalk/strip-ansi?sponsor=1" } }, + "node_modules/stringify-entities": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/stringify-entities/-/stringify-entities-4.0.3.tgz", + "integrity": "sha512-BP9nNHMhhfcMbiuQKCqMjhDP5yBCAxsPu4pHFFzJ6Alo9dZgY4VLDPutXqIjpRiMoKdp7Av85Gr73Q5uH9k7+g==", + "dependencies": { + "character-entities-html4": "^2.0.0", + "character-entities-legacy": "^3.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/stringify-entities/node_modules/character-entities-legacy": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-3.0.0.tgz", + "integrity": 
"sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/stringify-object": { "version": "3.3.0", "resolved": "https://registry.npmjs.org/stringify-object/-/stringify-object-3.3.0.tgz", @@ -16033,6 +18035,15 @@ "integrity": "sha512-YzQV+TZg4AxpKxaTHK3c3D+kRDCGVEE7LemdlQZoQXn0iennk10RsIoY6ikzAqJTc9Xjl9C1/waHom/J86ziAQ==", "deprecated": "Use String.prototype.trim() instead" }, + "node_modules/trim-lines": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/trim-lines/-/trim-lines-3.0.1.tgz", + "integrity": "sha512-kRj8B+YHZCc9kQYdWfJB2/oUl9rA99qbowYYBtr4ui4mZyAQ2JpvVBd/6U2YloATfqBhBTSMhTpgBHtU0Mf3Rg==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, "node_modules/trim-newlines": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/trim-newlines/-/trim-newlines-3.0.1.tgz", @@ -16441,6 +18452,18 @@ "url": "https://opencollective.com/unified" } }, + "node_modules/unist-util-position-from-estree": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/unist-util-position-from-estree/-/unist-util-position-from-estree-1.1.2.tgz", + "integrity": "sha512-poZa0eXpS+/XpoQwGwl79UUdea4ol2ZuCYguVaJS4qzIOMDzbqz8a3erUCOmubSZkaOuGamb3tX790iwOIROww==", + "dependencies": { + "@types/unist": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, "node_modules/unist-util-remove": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/unist-util-remove/-/unist-util-remove-2.1.0.tgz", diff --git a/docs/package.json b/docs/package.json index c7732b3dce..856e66ebe0 100644 --- a/docs/package.json +++ b/docs/package.json @@ -15,12 +15,13 @@ }, "dependencies": { "@babel/preset-react": "^7.22.3", + "@code-hike/mdx": "^0.9.0", "@docusaurus/core": "2.4.1", "@docusaurus/plugin-ideal-image": "^2.4.1", 
"@docusaurus/preset-classic": "2.4.1", "@docusaurus/theme-classic": "^2.4.1", "@docusaurus/theme-search-algolia": "^2.4.1", - "@mdx-js/react": "^1.6.22", + "@mdx-js/react": "^2.3.0", "@mendable/search": "^0.0.114", "@pbe/react-yandex-maps": "^1.2.4", "@prismicio/client": "^7.0.1", @@ -28,6 +29,7 @@ "autoprefixer": "^10.4.14", "clsx": "^1.2.1", "docusaurus-plugin-image-zoom": "^0.1.4", + "docusaurus-theme-mdx-v2": "^0.1.2", "jquery": "^3.7.0", "medium-zoom": "^1.0.8", "node-fetch": "^3.3.1", @@ -67,4 +69,4 @@ "engines": { "node": ">=16.14" } -} \ No newline at end of file +} diff --git a/docs/sidebars.js b/docs/sidebars.js index 01a84cf334..ef98f042a6 100644 --- a/docs/sidebars.js +++ b/docs/sidebars.js @@ -21,6 +21,8 @@ module.exports = { "guidelines/collection", "guidelines/prompt-customization", "guidelines/chat-interface", + "guidelines/chat-widget", + "guidelines/custom-component", ], }, { @@ -30,6 +32,7 @@ module.exports = { items: [ "components/agents", "components/chains", + "components/custom", "components/embeddings", "components/llms", "components/loaders", @@ -63,6 +66,7 @@ module.exports = { label: "Examples", collapsed: false, items: [ + "examples/flow-runner", "examples/conversation-chain", "examples/buffer-memory", "examples/midjourney-prompt-chain", diff --git a/docs/src/css/custom.css b/docs/src/css/custom.css index 2f6f992f30..b79c4df59a 100644 --- a/docs/src/css/custom.css +++ b/docs/src/css/custom.css @@ -3,17 +3,19 @@ * bundles Infima by default. Infima is a CSS framework designed to * work well for content-centric websites. 
*/ - :root { +:root { --ifm-background-color: var(--token-primary-bg-c); --ifm-navbar-link-hover-color: initial; --ifm-navbar-padding-vertical: 0; --ifm-navbar-item-padding-vertical: 0; - --ifm-font-family-base: -apple-system, BlinkMacSystemFont, Inter, Helvetica, Arial, sans-serif, 'Apple Color Emoji', 'Segoe UI emoji'; - --ifm-font-family-monospace: 'SFMono-Regular', 'Roboto Mono', Consolas, 'Liberation Mono', Menlo, Courier, monospace; + --ifm-font-family-base: -apple-system, BlinkMacSystemFont, Inter, Helvetica, + Arial, sans-serif, "Apple Color Emoji", "Segoe UI emoji"; + --ifm-font-family-monospace: "SFMono-Regular", "Roboto Mono", Consolas, + "Liberation Mono", Menlo, Courier, monospace; } .theme-doc-sidebar-item-category.menu__list-item:not(:first-child) { - margin-top: 1.5rem!important; + margin-top: 1.5rem !important; } .docusaurus-highlight-code-line { @@ -31,7 +33,7 @@ transform: skewY(6deg); } -[class^='announcementBar'] { +[class^="announcementBar"] { z-index: 10; } @@ -112,7 +114,7 @@ body { } .header-github-link:before { - content: ''; + content: ""; width: 24px; height: 24px; display: flex; @@ -126,7 +128,7 @@ body { } .header-twitter-link::before { - content: ''; + content: ""; width: 24px; height: 24px; display: flex; @@ -140,7 +142,7 @@ body { } .header-discord-link::before { - content: ''; + content: ""; width: 24px; height: 24px; display: flex; @@ -148,7 +150,6 @@ body { background-size: contain; } - /* Images */ .image-rendering-crisp { image-rendering: crisp-edges; @@ -164,7 +165,7 @@ body { .img-center { display: flex; justify-content: center; - width: 100%, + width: 100%; } .resized-image { @@ -188,4 +189,22 @@ body { .mendable-search { width: 140px; } -} \ No newline at end of file +} +/* +.ch-scrollycoding { + gap: 10rem !important; +} */ + +.ch-scrollycoding-content { + max-width: 55% !important; + min-width: 40% !important; +} + +.ch-scrollycoding-sticker { + max-width: 60% !important; + min-width: 45% !important; +} + 
+.ch-scrollycoding-step-content { + min-height: 70px; +} diff --git a/docs/src/theme/ZoomableImage.js b/docs/src/theme/ZoomableImage.js index 750066bb72..aeeb0454a5 100644 --- a/docs/src/theme/ZoomableImage.js +++ b/docs/src/theme/ZoomableImage.js @@ -1,8 +1,9 @@ -import React, { useState, useEffect } from 'react'; -import ThemedImage from '@theme/ThemedImage'; -import useBaseUrl from '@docusaurus/useBaseUrl'; +import React, { useState, useEffect } from "react"; +import ThemedImage from "@theme/ThemedImage"; +import useBaseUrl from "@docusaurus/useBaseUrl"; -const ZoomableImage = ({ alt, sources }) => { +const ZoomableImage = ({ alt, sources, style }) => { + // add style here const [isFullscreen, setIsFullscreen] = useState(false); const toggleFullscreen = () => { @@ -10,27 +11,36 @@ const ZoomableImage = ({ alt, sources }) => { }; const handleKeyPress = (event) => { - if (event.key === 'Escape') { + if (event.key === "Escape") { setIsFullscreen(false); } }; useEffect(() => { if (isFullscreen) { - document.addEventListener('keydown', handleKeyPress); + document.addEventListener("keydown", handleKeyPress); } else { - document.removeEventListener('keydown', handleKeyPress); + document.removeEventListener("keydown", handleKeyPress); } return () => { - document.removeEventListener('keydown', handleKeyPress); + document.removeEventListener("keydown", handleKeyPress); }; }, [isFullscreen]); + // Default style + const defaultStyle = { + width: "50%", + margin: "0 auto", + display: "flex", + justifyContent: "center", + }; + return (
Any: # This is to emulate the stream of tokens for resp in resps: await self.websocket.send_json(resp.dict()) - except Exception as e: - logger.error(e) + except Exception as exc: + logger.error(f"Error sending response: {exc}") async def on_tool_error( self, error: Union[Exception, KeyboardInterrupt], **kwargs: Any diff --git a/src/backend/langflow/api/v1/chat.py b/src/backend/langflow/api/v1/chat.py index 43f10a54b1..4248bcebd5 100644 --- a/src/backend/langflow/api/v1/chat.py +++ b/src/backend/langflow/api/v1/chat.py @@ -26,7 +26,7 @@ async def chat(client_id: str, websocket: WebSocket): message = "Please, build the flow before sending messages" await websocket.close(code=status.WS_1011_INTERNAL_ERROR, reason=message) except WebSocketException as exc: - logger.error(exc) + logger.error(f"Websocket error: {exc}") await websocket.close(code=status.WS_1011_INTERNAL_ERROR, reason=str(exc)) @@ -56,7 +56,7 @@ async def init_build(graph_data: dict, flow_id: str): return InitResponse(flowId=flow_id) except Exception as exc: - logger.error(exc) + logger.error(f"Error initializing build: {exc}") return HTTPException(status_code=500, detail=str(exc)) @@ -74,7 +74,7 @@ async def build_status(flow_id: str): ) except Exception as exc: - logger.error(exc) + logger.error(f"Error checking build status: {exc}") return HTTPException(status_code=500, detail=str(exc)) @@ -177,5 +177,5 @@ async def event_stream(flow_id): try: return StreamingResponse(event_stream(flow_id), media_type="text/event-stream") except Exception as exc: - logger.error(exc) + logger.error(f"Error streaming build: {exc}") raise HTTPException(status_code=500, detail=str(exc)) diff --git a/src/backend/langflow/api/v1/components.py b/src/backend/langflow/api/v1/components.py new file mode 100644 index 0000000000..1e34da2aaa --- /dev/null +++ b/src/backend/langflow/api/v1/components.py @@ -0,0 +1,77 @@ +from datetime import timezone +from typing import List +from uuid import UUID +from 
langflow.database.models.component import Component, ComponentModel +from langflow.database.base import get_session +from sqlmodel import Session, select +from fastapi import APIRouter, Depends, HTTPException +from sqlalchemy.exc import IntegrityError +from datetime import datetime + + +COMPONENT_NOT_FOUND = "Component not found" +COMPONENT_ALREADY_EXISTS = "A component with the same id already exists." +COMPONENT_DELETED = "Component deleted" + + +router = APIRouter(prefix="/components", tags=["Components"]) + + +@router.post("/", response_model=Component) +def create_component(component: ComponentModel, db: Session = Depends(get_session)): + db_component = Component(**component.dict()) + try: + db.add(db_component) + db.commit() + db.refresh(db_component) + except IntegrityError as e: + db.rollback() + raise HTTPException( + status_code=400, + detail=COMPONENT_ALREADY_EXISTS, + ) from e + return db_component + + +@router.get("/{component_id}", response_model=Component) +def read_component(component_id: UUID, db: Session = Depends(get_session)): + if component := db.get(Component, component_id): + return component + else: + raise HTTPException(status_code=404, detail=COMPONENT_NOT_FOUND) + + +@router.get("/", response_model=List[Component]) +def read_components(skip: int = 0, limit: int = 50, db: Session = Depends(get_session)): + query = select(Component) + query = query.offset(skip).limit(limit) + + return db.execute(query).fetchall() + + +@router.patch("/{component_id}", response_model=Component) +def update_component( + component_id: UUID, component: ComponentModel, db: Session = Depends(get_session) +): + db_component = db.get(Component, component_id) + if not db_component: + raise HTTPException(status_code=404, detail=COMPONENT_NOT_FOUND) + component_data = component.dict(exclude_unset=True) + + for key, value in component_data.items(): + setattr(db_component, key, value) + + db_component.update_at = datetime.now(timezone.utc) + db.commit() + 
db.refresh(db_component) + return db_component + + +@router.delete("/{component_id}") +def delete_component(component_id: UUID, db: Session = Depends(get_session)): + component = db.get(Component, component_id) + if not component: + raise HTTPException(status_code=404, detail=COMPONENT_NOT_FOUND) + db.delete(component) + db.commit() + return {"detail": COMPONENT_DELETED} diff --git a/src/backend/langflow/api/v1/endpoints.py b/src/backend/langflow/api/v1/endpoints.py index 13cba6c2c2..f11a233408 100644 --- a/src/backend/langflow/api/v1/endpoints.py +++ b/src/backend/langflow/api/v1/endpoints.py @@ -1,17 +1,34 @@ -from typing import Optional +from http import HTTPStatus +from typing import Annotated, Optional + from langflow.cache.utils import save_uploaded_file from langflow.database.models.flow import Flow from langflow.processing.process import process_graph_cached, process_tweaks from langflow.utils.logger import logger +from langflow.settings import settings + +from fastapi import APIRouter, Depends, HTTPException, UploadFile, Body -from fastapi import APIRouter, Depends, HTTPException, UploadFile +from langflow.interface.custom.custom_component import CustomComponent + +from langflow.interface.custom.directory_reader import ( + CustomComponentPathValueError, +) from langflow.api.v1.schemas import ( ProcessResponse, UploadFileResponse, + CustomComponentCode, +) + +from langflow.api.utils import merge_nested_dicts + +from langflow.interface.types import ( + build_langchain_types_dict, + build_langchain_template_custom_component, + build_langchain_custom_component_list_from_path, ) -from langflow.interface.types import langchain_types_dict from langflow.database.base import get_session from sqlmodel import Session @@ -21,7 +38,47 @@ @router.get("/all") def get_all(): - return langchain_types_dict + native_components = build_langchain_types_dict() + + # custom_components is a list of dicts + # need to merge all the keys into one dict + custom_components_from_file = 
{} + if settings.components_path: + custom_component_dicts = [ + build_langchain_custom_component_list_from_path(str(path)) + for path in settings.components_path + ] + for custom_component_dict in custom_component_dicts: + custom_components_from_file = merge_nested_dicts( + custom_components_from_file, custom_component_dict + ) + return merge_nested_dicts(native_components, custom_components_from_file) + + +@router.get("/load_custom_component_from_path") +def get_load_custom_component_from_path(path: str): + try: + data = build_langchain_custom_component_list_from_path(path) + except CustomComponentPathValueError as err: + raise HTTPException( + status_code=400, + detail={"error": type(err).__name__, "traceback": str(err)}, + ) from err + + return data + + +@router.get("/load_custom_component_from_path_TEST") +def get_load_custom_component_from_path_test(path: str): + from langflow.interface.custom.directory_reader import ( + DirectoryReader, + ) + + reader = DirectoryReader(path, False) + file_list = reader.get_files() + data = reader.build_component_menu_list(file_list) + + return reader.filter_loaded_components(data, True) # For backwards compatibility we will keep the old endpoint @@ -31,6 +88,7 @@ async def process_flow( flow_id: str, inputs: Optional[dict] = None, tweaks: Optional[dict] = None, + clear_cache: Annotated[bool, Body(embed=True)] = False, # noqa: F821 session: Session = Depends(get_session), ): """ @@ -50,7 +108,7 @@ async def process_flow( graph_data = process_tweaks(graph_data, tweaks) except Exception as exc: logger.error(f"Error processing tweaks: {exc}") - response = process_graph_cached(graph_data, inputs) + response = process_graph_cached(graph_data, inputs, clear_cache) return ProcessResponse( result=response, ) @@ -60,7 +118,11 @@ async def process_flow( raise HTTPException(status_code=500, detail=str(e)) from e -@router.post("/upload/{flow_id}", response_model=UploadFileResponse, status_code=201) +@router.post( + "/upload/{flow_id}", + 
response_model=UploadFileResponse, + status_code=HTTPStatus.CREATED, +) async def create_upload_file(file: UploadFile, flow_id: str): # Cache file try: @@ -81,3 +143,13 @@ def get_version(): from langflow import __version__ return {"version": __version__} + + +@router.post("/custom_component", status_code=HTTPStatus.OK) +async def custom_component( + raw_code: CustomComponentCode, +): + extractor = CustomComponent(code=raw_code.code) + extractor.is_check_valid() + + return build_langchain_template_custom_component(extractor) diff --git a/src/backend/langflow/api/v1/schemas.py b/src/backend/langflow/api/v1/schemas.py index e4b9a6e84f..0148dac6db 100644 --- a/src/backend/langflow/api/v1/schemas.py +++ b/src/backend/langflow/api/v1/schemas.py @@ -116,3 +116,20 @@ class StreamData(BaseModel): def __str__(self) -> str: return f"event: {self.event}\ndata: {json.dumps(self.data)}\n\n" + + +class CustomComponentCode(BaseModel): + code: str + + +class CustomComponentResponseError(BaseModel): + detail: str + traceback: str + + +class ComponentListCreate(BaseModel): + flows: List[FlowCreate] + + +class ComponentListRead(BaseModel): + flows: List[FlowRead] diff --git a/src/backend/langflow/chat/manager.py b/src/backend/langflow/chat/manager.py index 33de784b53..1e93174e2f 100644 --- a/src/backend/langflow/chat/manager.py +++ b/src/backend/langflow/chat/manager.py @@ -111,7 +111,7 @@ async def close_connection(self, client_id: str, code: int, reason: str): # This is to catch the following error: # Unexpected ASGI message 'websocket.close', after sending 'websocket.close' if "after sending" in str(exc): - logger.error(exc) + logger.error(f"Error closing connection: {exc}") async def process_message( self, client_id: str, payload: Dict, langchain_object: Any @@ -197,13 +197,13 @@ async def handle_websocket(self, client_id: str, websocket: WebSocket): langchain_object = self.in_memory_cache.get(client_id) await self.process_message(client_id, payload, langchain_object) - except 
Exception as e: + except Exception as exc: # Handle any exceptions that might occur - logger.error(e) + logger.error(f"Error handling websocket: {exc}") await self.close_connection( client_id=client_id, code=status.WS_1011_INTERNAL_ERROR, - reason=str(e)[:120], + reason=str(exc)[:120], ) finally: try: @@ -212,6 +212,6 @@ async def handle_websocket(self, client_id: str, websocket: WebSocket): code=status.WS_1000_NORMAL_CLOSURE, reason="Client disconnected", ) - except Exception as e: - logger.error(e) + except Exception as exc: + logger.error(f"Error closing connection: {exc}") self.disconnect(client_id) diff --git a/src/backend/langflow/config.yaml b/src/backend/langflow/config.yaml index 7a75d8ff47..63e8cdf993 100644 --- a/src/backend/langflow/config.yaml +++ b/src/backend/langflow/config.yaml @@ -292,3 +292,6 @@ output_parsers: documentation: "https://python.langchain.com/docs/modules/model_io/output_parsers/structured" ResponseSchema: documentation: "https://python.langchain.com/docs/modules/model_io/output_parsers/structured" +custom_components: + CustomComponent: + documentation: "" diff --git a/src/backend/langflow/custom/customs.py b/src/backend/langflow/custom/customs.py index 58ef1b5088..55d855197a 100644 --- a/src/backend/langflow/custom/customs.py +++ b/src/backend/langflow/custom/customs.py @@ -31,6 +31,9 @@ "MidJourneyPromptChain": frontend_node.chains.MidJourneyPromptChainNode(), "load_qa_chain": frontend_node.chains.CombineDocsChainNode(), }, + "custom_components": { + "CustomComponent": frontend_node.custom_components.CustomComponentFrontendNode(), + }, } diff --git a/src/backend/langflow/database/base.py b/src/backend/langflow/database/base.py index 256434523d..338298a6be 100644 --- a/src/backend/langflow/database/base.py +++ b/src/backend/langflow/database/base.py @@ -1,3 +1,4 @@ +from contextlib import contextmanager from langflow.settings import settings from sqlmodel import SQLModel, Session, create_engine from langflow.utils.logger import 
logger @@ -32,6 +33,19 @@ def create_db_and_tables(): logger.debug("Database and tables created successfully") +@contextmanager +def session_getter(): + try: + session = Session(engine) + yield session + except Exception as e: + print("Session rollback because of exception:", e) + session.rollback() + raise + finally: + session.close() + + def get_session(): - with Session(engine) as session: + with session_getter() as session: yield session diff --git a/src/backend/langflow/database/models/component.py b/src/backend/langflow/database/models/component.py new file mode 100644 index 0000000000..bb2408cdbd --- /dev/null +++ b/src/backend/langflow/database/models/component.py @@ -0,0 +1,29 @@ +from langflow.database.models.base import SQLModelSerializable, SQLModel +from sqlmodel import Field +from typing import Optional +from datetime import datetime +import uuid + + +class Component(SQLModelSerializable, table=True): + id: uuid.UUID = Field(default_factory=uuid.uuid4, primary_key=True) + frontend_node_id: uuid.UUID = Field(index=True) + name: str = Field(index=True) + description: Optional[str] = Field(default=None) + python_code: Optional[str] = Field(default=None) + return_type: Optional[str] = Field(default=None) + is_disabled: bool = Field(default=False) + is_read_only: bool = Field(default=False) + create_at: datetime = Field(default_factory=datetime.utcnow) + update_at: datetime = Field(default_factory=datetime.utcnow) + + +class ComponentModel(SQLModel): + id: uuid.UUID = Field(default_factory=uuid.uuid4) + frontend_node_id: uuid.UUID = Field(default=uuid.uuid4()) + name: str = Field(default="") + description: Optional[str] = None + python_code: Optional[str] = None + return_type: Optional[str] = None + is_disabled: bool = False + is_read_only: bool = False diff --git a/src/backend/langflow/graph/graph/base.py b/src/backend/langflow/graph/graph/base.py index 0d93dd0dba..99b4e2b3d7 100644 --- a/src/backend/langflow/graph/graph/base.py +++ 
b/src/backend/langflow/graph/graph/base.py @@ -77,6 +77,8 @@ def _build_node_params(self) -> None: def _validate_nodes(self) -> None: """Check that all nodes have edges""" + if len(self.nodes) == 1: + return for node in self.nodes: if not self._validate_node(node): raise ValueError( diff --git a/src/backend/langflow/graph/graph/constants.py b/src/backend/langflow/graph/graph/constants.py index a2fd287ebf..5e5c3b7091 100644 --- a/src/backend/langflow/graph/graph/constants.py +++ b/src/backend/langflow/graph/graph/constants.py @@ -14,7 +14,7 @@ from langflow.interface.wrappers.base import wrapper_creator from langflow.interface.output_parsers.base import output_parser_creator from langflow.interface.retrievers.base import retriever_creator - +from langflow.interface.custom.base import custom_component_creator from typing import Dict, Type @@ -32,5 +32,6 @@ **{t: types.DocumentLoaderVertex for t in documentloader_creator.to_list()}, **{t: types.TextSplitterVertex for t in textsplitter_creator.to_list()}, **{t: types.OutputParserVertex for t in output_parser_creator.to_list()}, + **{t: types.CustomComponentVertex for t in custom_component_creator.to_list()}, **{t: types.RetrieverVertex for t in retriever_creator.to_list()}, } diff --git a/src/backend/langflow/graph/vertex/types.py b/src/backend/langflow/graph/vertex/types.py index 20ec3e66d7..5aee7b14cf 100644 --- a/src/backend/langflow/graph/vertex/types.py +++ b/src/backend/langflow/graph/vertex/types.py @@ -239,3 +239,12 @@ def _built_object_repr(self): class OutputParserVertex(Vertex): def __init__(self, data: Dict): super().__init__(data, base_type="output_parsers") + + +class CustomComponentVertex(Vertex): + def __init__(self, data: Dict): + super().__init__(data, base_type="custom_components") + + def _built_object_repr(self): + if self.artifacts and "repr" in self.artifacts: + return self.artifacts["repr"] or super()._built_object_repr() diff --git a/src/backend/langflow/interface/base.py 
b/src/backend/langflow/interface/base.py index 6e1522dd26..e6a28bf7dc 100644 --- a/src/backend/langflow/interface/base.py +++ b/src/backend/langflow/interface/base.py @@ -34,7 +34,7 @@ def docs_map(self) -> Dict[str, str]: for name, value_dict in type_settings.items() } except AttributeError as exc: - logger.error(exc) + logger.error(f"Error getting settings for {self.type_name}: {exc}") self.name_docs_dict = {} return self.name_docs_dict diff --git a/src/backend/langflow/interface/custom/__init__.py b/src/backend/langflow/interface/custom/__init__.py new file mode 100644 index 0000000000..5b87e9fa3a --- /dev/null +++ b/src/backend/langflow/interface/custom/__init__.py @@ -0,0 +1,4 @@ +from langflow.interface.custom.base import CustomComponentCreator +from langflow.interface.custom.custom_component import CustomComponent + +__all__ = ["CustomComponentCreator", "CustomComponent"] diff --git a/src/backend/langflow/interface/custom/base.py b/src/backend/langflow/interface/custom/base.py new file mode 100644 index 0000000000..06e874fa75 --- /dev/null +++ b/src/backend/langflow/interface/custom/base.py @@ -0,0 +1,48 @@ +from typing import Any, Dict, List, Optional, Type + + +from langflow.interface.base import LangChainTypeCreator + +# from langflow.interface.custom.custom import CustomComponent +from langflow.interface.custom.custom_component import CustomComponent +from langflow.template.frontend_node.custom_components import ( + CustomComponentFrontendNode, +) +from langflow.utils.logger import logger + +# Assuming necessary imports for Field, Template, and FrontendNode classes + + +class CustomComponentCreator(LangChainTypeCreator): + type_name: str = "custom_components" + + @property + def frontend_node_class(self) -> Type[CustomComponentFrontendNode]: + return CustomComponentFrontendNode + + @property + def type_to_loader_dict(self) -> Dict: + if self.type_dict is None: + self.type_dict: dict[str, Any] = { + "CustomComponent": CustomComponent, + } + return 
self.type_dict + + def get_signature(self, name: str) -> Optional[Dict]: + from langflow.custom.customs import get_custom_nodes + + try: + if name in get_custom_nodes(self.type_name).keys(): + return get_custom_nodes(self.type_name)[name] + except ValueError as exc: + raise ValueError(f"CustomComponent {name} not found: {exc}") from exc + except AttributeError as exc: + logger.error(f"CustomComponent {name} not loaded: {exc}") + return None + return None + + def to_list(self) -> List[str]: + return list(self.type_to_loader_dict.keys()) + + +custom_component_creator = CustomComponentCreator() diff --git a/src/backend/langflow/interface/custom/code_parser.py b/src/backend/langflow/interface/custom/code_parser.py new file mode 100644 index 0000000000..d42f826358 --- /dev/null +++ b/src/backend/langflow/interface/custom/code_parser.py @@ -0,0 +1,272 @@ +import ast +import inspect +import traceback + +from typing import Dict, Any, List, Type, Union +from fastapi import HTTPException +from langflow.interface.custom.schema import CallableCodeDetails, ClassCodeDetails + + +class CodeSyntaxError(HTTPException): + pass + + +class CodeParser: + """ + A parser for Python source code, extracting code details. + """ + + def __init__(self, code: Union[str, Type]) -> None: + """ + Initializes the parser with the provided code. + """ + if isinstance(code, type): + if not inspect.isclass(code): + raise ValueError("The provided code must be a class.") + # If the code is a class, get its source code + code = inspect.getsource(code) + self.code = code + self.data: Dict[str, Any] = { + "imports": [], + "functions": [], + "classes": [], + "global_vars": [], + } + self.handlers = { + ast.Import: self.parse_imports, + ast.ImportFrom: self.parse_imports, + ast.FunctionDef: self.parse_functions, + ast.ClassDef: self.parse_classes, + ast.Assign: self.parse_global_vars, + } + + def __get_tree(self): + """ + Parses the provided code to validate its syntax. 
+ It tries to parse the code into an abstract syntax tree (AST). + """ + try: + tree = ast.parse(self.code) + except SyntaxError as err: + raise CodeSyntaxError( + status_code=400, + detail={"error": err.msg, "traceback": traceback.format_exc()}, + ) from err + + return tree + + def parse_node(self, node: Union[ast.stmt, ast.AST]) -> None: + """ + Parses an AST node and updates the data + dictionary with the relevant information. + """ + if handler := self.handlers.get(type(node)): # type: ignore + handler(node) # type: ignore + + def parse_imports(self, node: Union[ast.Import, ast.ImportFrom]) -> None: + """ + Extracts "imports" from the code. + """ + if isinstance(node, ast.Import): + for alias in node.names: + self.data["imports"].append(alias.name) + elif isinstance(node, ast.ImportFrom): + for alias in node.names: + self.data["imports"].append((node.module, alias.name)) + + def parse_functions(self, node: ast.FunctionDef) -> None: + """ + Extracts "functions" from the code. + """ + self.data["functions"].append(self.parse_callable_details(node)) + + def parse_arg(self, arg, default): + """ + Parses an argument and its default value. + """ + arg_dict = {"name": arg.arg, "default": default} + if arg.annotation: + arg_dict["type"] = ast.unparse(arg.annotation) + return arg_dict + + def parse_callable_details(self, node: ast.FunctionDef) -> Dict[str, Any]: + """ + Extracts details from a single function or method node. + """ + func = CallableCodeDetails( + name=node.name, + doc=ast.get_docstring(node), + args=[], + body=[], + return_type=ast.unparse(node.returns) if node.returns else None, + ) + + func.args = self.parse_function_args(node) + func.body = self.parse_function_body(node) + + return func.dict() + + def parse_function_args(self, node: ast.FunctionDef) -> List[Dict[str, Any]]: + """ + Parses the arguments of a function or method node. 
+ """ + args = [] + + args += self.parse_positional_args(node) + args += self.parse_varargs(node) + args += self.parse_keyword_args(node) + args += self.parse_kwargs(node) + + return args + + def parse_positional_args(self, node: ast.FunctionDef) -> List[Dict[str, Any]]: + """ + Parses the positional arguments of a function or method node. + """ + num_args = len(node.args.args) + num_defaults = len(node.args.defaults) + num_missing_defaults = num_args - num_defaults + missing_defaults = [None] * num_missing_defaults + default_values = [ + ast.unparse(default).strip("'") if default else None + for default in node.args.defaults + ] + # Now check all default values to see if there + # are any "None" values in the middle + default_values = [ + None if value == "None" else value for value in default_values + ] + + defaults = missing_defaults + default_values + + args = [ + self.parse_arg(arg, default) + for arg, default in zip(node.args.args, defaults) + ] + return args + + def parse_varargs(self, node: ast.FunctionDef) -> List[Dict[str, Any]]: + """ + Parses the *args argument of a function or method node. + """ + args = [] + + if node.args.vararg: + args.append(self.parse_arg(node.args.vararg, None)) + + return args + + def parse_keyword_args(self, node: ast.FunctionDef) -> List[Dict[str, Any]]: + """ + Parses the keyword-only arguments of a function or method node. + """ + kw_defaults = [None] * ( + len(node.args.kwonlyargs) - len(node.args.kw_defaults) + ) + [ + ast.unparse(default) if default else None + for default in node.args.kw_defaults + ] + + args = [ + self.parse_arg(arg, default) + for arg, default in zip(node.args.kwonlyargs, kw_defaults) + ] + return args + + def parse_kwargs(self, node: ast.FunctionDef) -> List[Dict[str, Any]]: + """ + Parses the **kwargs argument of a function or method node. 
+ """ + args = [] + + if node.args.kwarg: + args.append(self.parse_arg(node.args.kwarg, None)) + + return args + + def parse_function_body(self, node: ast.FunctionDef) -> List[str]: + """ + Parses the body of a function or method node. + """ + return [ast.unparse(line) for line in node.body] + + def parse_assign(self, stmt): + """ + Parses an Assign statement and returns a dictionary + with the target's name and value. + """ + for target in stmt.targets: + if isinstance(target, ast.Name): + return {"name": target.id, "value": ast.unparse(stmt.value)} + + def parse_ann_assign(self, stmt): + """ + Parses an AnnAssign statement and returns a dictionary + with the target's name, value, and annotation. + """ + if isinstance(stmt.target, ast.Name): + return { + "name": stmt.target.id, + "value": ast.unparse(stmt.value) if stmt.value else None, + "annotation": ast.unparse(stmt.annotation), + } + + def parse_function_def(self, stmt): + """ + Parses a FunctionDef statement and returns the parsed + method and a boolean indicating if it's an __init__ method. + """ + method = self.parse_callable_details(stmt) + return (method, True) if stmt.name == "__init__" else (method, False) + + def parse_classes(self, node: ast.ClassDef) -> None: + """ + Extracts "classes" from the code, including inheritance and init methods. 
+ """ + + class_details = ClassCodeDetails( + name=node.name, + doc=ast.get_docstring(node), + bases=[ast.unparse(base) for base in node.bases], + attributes=[], + methods=[], + init=None, + ) + + for stmt in node.body: + if isinstance(stmt, ast.Assign): + if attr := self.parse_assign(stmt): + class_details.attributes.append(attr) + elif isinstance(stmt, ast.AnnAssign): + if attr := self.parse_ann_assign(stmt): + class_details.attributes.append(attr) + elif isinstance(stmt, ast.FunctionDef): + method, is_init = self.parse_function_def(stmt) + if is_init: + class_details.init = method + else: + class_details.methods.append(method) + + self.data["classes"].append(class_details.dict()) + + def parse_global_vars(self, node: ast.Assign) -> None: + """ + Extracts global variables from the code. + """ + global_var = { + "targets": [ + t.id if hasattr(t, "id") else ast.dump(t) for t in node.targets + ], + "value": ast.unparse(node.value), + } + self.data["global_vars"].append(global_var) + + def parse_code(self) -> Dict[str, Any]: + """ + Runs all parsing operations and returns the resulting data. + """ + tree = self.__get_tree() + + for node in ast.walk(tree): + self.parse_node(node) + return self.data diff --git a/src/backend/langflow/interface/custom/component.py b/src/backend/langflow/interface/custom/component.py new file mode 100644 index 0000000000..a9dc0f3234 --- /dev/null +++ b/src/backend/langflow/interface/custom/component.py @@ -0,0 +1,72 @@ +import ast +from typing import Optional +from pydantic import BaseModel +from fastapi import HTTPException + +from langflow.utils import validate +from langflow.interface.custom.code_parser import CodeParser + + +class ComponentCodeNullError(HTTPException): + pass + + +class ComponentFunctionEntrypointNameNullError(HTTPException): + pass + + +class Component(BaseModel): + ERROR_CODE_NULL = "Python code must be provided." + ERROR_FUNCTION_ENTRYPOINT_NAME_NULL = ( + "The name of the entrypoint function must be provided." 
+ ) + + code: Optional[str] + function_entrypoint_name = "build" + field_config: dict = {} + + def __init__(self, **data): + super().__init__(**data) + + def get_code_tree(self, code: str): + parser = CodeParser(code) + return parser.parse_code() + + def get_function(self): + if not self.code: + raise ComponentCodeNullError( + status_code=400, + detail={"error": self.ERROR_CODE_NULL, "traceback": ""}, + ) + + if not self.function_entrypoint_name: + raise ComponentFunctionEntrypointNameNullError( + status_code=400, + detail={ + "error": self.ERROR_FUNCTION_ENTRYPOINT_NAME_NULL, + "traceback": "", + }, + ) + + return validate.create_function(self.code, self.function_entrypoint_name) + + def build_template_config(self, attributes) -> dict: + template_config = {} + + for item in attributes: + item_name = item.get("name") + + if item_value := item.get("value"): + if "display_name" in item_name: + template_config["display_name"] = ast.literal_eval(item_value) + + elif "description" in item_name: + template_config["description"] = ast.literal_eval(item_value) + + elif "field_config" in item_name: + template_config["field_config"] = ast.literal_eval(item_value) + + return template_config + + def build(self): + raise NotImplementedError diff --git a/src/backend/langflow/interface/custom/constants.py b/src/backend/langflow/interface/custom/constants.py new file mode 100644 index 0000000000..8e5db39b8d --- /dev/null +++ b/src/backend/langflow/interface/custom/constants.py @@ -0,0 +1,59 @@ +from langchain import PromptTemplate +from langchain.chains.base import Chain +from langchain.document_loaders.base import BaseLoader +from langchain.embeddings.base import Embeddings +from langchain.llms.base import BaseLLM +from langchain.schema import BaseRetriever, Document +from langchain.text_splitter import TextSplitter +from langchain.tools import Tool +from langchain.vectorstores.base import VectorStore + + +LANGCHAIN_BASE_TYPES = { + "Chain": Chain, + "Tool": Tool, + "BaseLLM": 
BaseLLM, + "PromptTemplate": PromptTemplate, + "BaseLoader": BaseLoader, + "Document": Document, + "TextSplitter": TextSplitter, + "VectorStore": VectorStore, + "Embeddings": Embeddings, + "BaseRetriever": BaseRetriever, +} + +# Langchain base types plus Python base types +CUSTOM_COMPONENT_SUPPORTED_TYPES = { + **LANGCHAIN_BASE_TYPES, + "str": str, + "int": int, + "float": float, + "bool": bool, + "list": list, + "dict": dict, +} + + +DEFAULT_CUSTOM_COMPONENT_CODE = """ +from langflow import CustomComponent + +from langchain.llms.base import BaseLLM +from langchain.chains import LLMChain +from langchain import PromptTemplate +from langchain.schema import Document + +import requests + +class YourComponent(CustomComponent): + display_name: str = "Your Component" + description: str = "Your description" + + def build_config(self): + return { "url": { "multiline": True, "required": True } } + + def build(self, url: str, llm: BaseLLM, prompt: PromptTemplate) -> Document: + response = requests.get(url) + chain = LLMChain(llm=llm, prompt=prompt) + result = chain.run(response.text[:300]) + return Document(page_content=str(result)) +""" diff --git a/src/backend/langflow/interface/custom/custom_component.py b/src/backend/langflow/interface/custom/custom_component.py new file mode 100644 index 0000000000..353298cbd0 --- /dev/null +++ b/src/backend/langflow/interface/custom/custom_component.py @@ -0,0 +1,194 @@ +from typing import Any, Callable, List, Optional +from fastapi import HTTPException +from langflow.interface.custom.constants import CUSTOM_COMPONENT_SUPPORTED_TYPES +from langflow.interface.custom.component import Component +from langflow.interface.custom.directory_reader import DirectoryReader + +from langflow.utils import validate + +from langflow.database.base import session_getter +from langflow.database.models.flow import Flow +from pydantic import Extra + + +class CustomComponent(Component, extra=Extra.allow): + code: Optional[str] + field_config: dict = {} + 
code_class_base_inheritance = "CustomComponent" + function_entrypoint_name = "build" + function: Optional[Callable] = None + return_type_valid_list = list(CUSTOM_COMPONENT_SUPPORTED_TYPES.keys()) + repr_value: Optional[str] = "" + + def __init__(self, **data): + super().__init__(**data) + + def custom_repr(self): + return str(self.repr_value) + + def build_config(self): + return self.field_config + + def _class_template_validation(self, code: str) -> bool: + TYPE_HINT_LIST = ["Optional", "Prompt", "PromptTemplate", "LLMChain"] + + if not code: + raise HTTPException( + status_code=400, + detail={ + "error": self.ERROR_CODE_NULL, + "traceback": "", + }, + ) + + reader = DirectoryReader("", False) + + for type_hint in TYPE_HINT_LIST: + if reader.is_type_hint_used_but_not_imported(type_hint, code): + error_detail = { + "error": "Type hint Error", + "traceback": f"Type hint '{type_hint}' is used but not imported in the code.", + } + raise HTTPException(status_code=400, detail=error_detail) + + def is_check_valid(self) -> bool: + return self._class_template_validation(self.code) + + def get_code_tree(self, code: str): + return super().get_code_tree(code) + + @property + def get_function_entrypoint_args(self) -> str: + if not self.code: + return "" + tree = self.get_code_tree(self.code) + + component_classes = [ + cls + for cls in tree["classes"] + if self.code_class_base_inheritance in cls["bases"] + ] + if not component_classes: + return "" + + # Assume the first Component class is the one we're interested in + component_class = component_classes[0] + build_methods = [ + method + for method in component_class["methods"] + if method["name"] == self.function_entrypoint_name + ] + + if not build_methods: + return "" + + build_method = build_methods[0] + + return build_method["args"] + + @property + def get_function_entrypoint_return_type(self) -> str: + if not self.code: + return "" + tree = self.get_code_tree(self.code) + + component_classes = [ + cls + for cls in 
tree["classes"] + if self.code_class_base_inheritance in cls["bases"] + ] + if not component_classes: + return "" + + # Assume the first Component class is the one we're interested in + component_class = component_classes[0] + build_methods = [ + method + for method in component_class["methods"] + if method["name"] == self.function_entrypoint_name + ] + + if not build_methods: + return "" + + build_method = build_methods[0] + + return build_method["return_type"] + + @property + def get_main_class_name(self): + tree = self.get_code_tree(self.code) + + base_name = self.code_class_base_inheritance + method_name = self.function_entrypoint_name + + classes = [] + for item in tree.get("classes"): + if base_name in item["bases"]: + method_names = [method["name"] for method in item["methods"]] + if method_name in method_names: + classes.append(item["name"]) + + # Get just the first item + return next(iter(classes), "") + + @property + def build_template_config(self): + tree = self.get_code_tree(self.code) + + attributes = [ + main_class["attributes"] + for main_class in tree.get("classes") + if main_class["name"] == self.get_main_class_name + ] + # Get just the first item + attributes = next(iter(attributes), []) + + return super().build_template_config(attributes) + + @property + def get_function(self): + return validate.create_function(self.code, self.function_entrypoint_name) + + def load_flow(self, flow_id: str, tweaks: Optional[dict] = None) -> Any: + from langflow.processing.process import build_sorted_vertices_with_caching + from langflow.processing.process import process_tweaks + + with session_getter() as session: + graph_data = flow.data if (flow := session.get(Flow, flow_id)) else None + if not graph_data: + raise ValueError(f"Flow {flow_id} not found") + if tweaks: + graph_data = process_tweaks(graph_data=graph_data, tweaks=tweaks) + return build_sorted_vertices_with_caching(graph_data) + + def list_flows(self, *, get_session: Optional[Callable] = None) -> 
List[Flow]: + get_session = get_session or session_getter + with get_session() as session: + flows = session.query(Flow).all() + return flows + + def get_flow( + self, + *, + flow_name: Optional[str] = None, + flow_id: Optional[str] = None, + tweaks: Optional[dict] = None, + get_session: Optional[Callable] = None, + ) -> Flow: + get_session = get_session or session_getter + + with get_session() as session: + if flow_id: + flow = session.query(Flow).get(flow_id) + elif flow_name: + flow = session.query(Flow).filter(Flow.name == flow_name).first() + else: + raise ValueError("Either flow_name or flow_id must be provided") + + if not flow: + raise ValueError(f"Flow {flow_name or flow_id} not found") + return self.load_flow(flow.id, tweaks) + + def build(self): + raise NotImplementedError diff --git a/src/backend/langflow/interface/custom/directory_reader.py b/src/backend/langflow/interface/custom/directory_reader.py new file mode 100644 index 0000000000..3357d164f9 --- /dev/null +++ b/src/backend/langflow/interface/custom/directory_reader.py @@ -0,0 +1,239 @@ +import os +import ast +import zlib + + +class CustomComponentPathValueError(ValueError): + pass + + +class StringCompressor: + def __init__(self, input_string): + """Initialize StringCompressor with a string to compress.""" + self.input_string = input_string + + def compress_string(self): + """ + Compress the initial string and return the compressed data. + """ + # Convert string to bytes + byte_data = self.input_string.encode("utf-8") + # Compress the bytes + self.compressed_data = zlib.compress(byte_data) + + return self.compressed_data + + def decompress_string(self): + """ + Decompress the compressed data and return the original string. 
+ """ + # Decompress the bytes + decompressed_data = zlib.decompress(self.compressed_data) + # Convert bytes back to string + return decompressed_data.decode("utf-8") + + +class DirectoryReader: + # Ensure the base path to read the files that contain + # the custom components from this directory. + base_path = "" + + def __init__(self, directory_path, compress_code_field=False): + """ + Initialize DirectoryReader with a directory path + and a flag indicating whether to compress the code. + """ + self.directory_path = directory_path + self.compress_code_field = compress_code_field + + def get_safe_path(self): + """Check if the path is valid and return it, or None if it's not.""" + return self.directory_path if self.is_valid_path() else None + + def is_valid_path(self) -> bool: + """Check if the directory path is valid by comparing it to the base path.""" + fullpath = os.path.normpath(os.path.join(self.directory_path)) + return fullpath.startswith(self.base_path) + + def is_empty_file(self, file_content): + """ + Check if the file content is empty. + """ + return len(file_content.strip()) == 0 + + def filter_loaded_components(self, data: dict, with_errors: bool) -> dict: + items = [ + { + "name": menu["name"], + "path": menu["path"], + "components": [ + component + for component in menu["components"] + if (component["error"] if with_errors else not component["error"]) + ], + } + for menu in data["menu"] + ] + filtred = [menu for menu in items if menu["components"]] + return {"menu": filtred} + + def validate_code(self, file_content): + """ + Validate the Python code by trying to parse it with ast.parse. + """ + try: + ast.parse(file_content) + return True + except SyntaxError: + return False + + def validate_build(self, file_content): + """ + Check if the file content contains a function named 'build'. + """ + return "def build" in file_content + + def read_file_content(self, file_path): + """ + Read and return the content of a file. 
+ """ + if not os.path.isfile(file_path): + return None + with open(file_path, "r") as file: + return file.read() + + def get_files(self): + """ + Walk through the directory path and return a list of all .py files. + """ + if not (safe_path := self.get_safe_path()): + raise CustomComponentPathValueError( + f"The path needs to start with '{self.base_path}'." + ) + + file_list = [] + for root, _, files in os.walk(safe_path): + file_list.extend( + os.path.join(root, filename) + for filename in files + if filename.endswith(".py") + ) + return file_list + + def find_menu(self, response, menu_name): + """ + Find and return a menu by its name in the response. + """ + return next( + (menu for menu in response["menu"] if menu["name"] == menu_name), + None, + ) + + def _is_type_hint_imported(self, type_hint_name: str, code: str) -> bool: + """ + Check if a specific type hint is imported + from the typing module in the given code. + """ + module = ast.parse(code) + + return any( + isinstance(node, ast.ImportFrom) + and node.module == "typing" + and any(alias.name == type_hint_name for alias in node.names) + for node in ast.walk(module) + ) + + def _is_type_hint_used_in_args(self, type_hint_name: str, code: str) -> bool: + """ + Check if a specific type hint is used in the + function definitions within the given code. + """ + module = ast.parse(code) + + for node in ast.walk(module): + if isinstance(node, ast.FunctionDef): + for arg in node.args.args: + if self._is_type_hint_in_arg_annotation( + arg.annotation, type_hint_name + ): + return True + return False + + def _is_type_hint_in_arg_annotation(self, annotation, type_hint_name: str) -> bool: + """ + Helper function to check if a type hint exists in an annotation. 
+ """ + return ( + annotation is not None + and isinstance(annotation, ast.Subscript) + and isinstance(annotation.value, ast.Name) + and annotation.value.id == type_hint_name + ) + + def is_type_hint_used_but_not_imported( + self, type_hint_name: str, code: str + ) -> bool: + """ + Check if a type hint is used but not imported in the given code. + """ + try: + return self._is_type_hint_used_in_args( + type_hint_name, code + ) and not self._is_type_hint_imported(type_hint_name, code) + except SyntaxError: + # Returns True if there's something wrong with the code + # TODO : Find a better way to handle this + return True + + def process_file(self, file_path): + """ + Process a file by validating its content and + returning the result and content/error message. + """ + file_content = self.read_file_content(file_path) + + if file_content is None: + return False, f"Could not read {file_path}" + elif self.is_empty_file(file_content): + return False, "Empty file" + elif not self.validate_code(file_content): + return False, "Syntax error" + elif not self.validate_build(file_content): + return False, "Missing build function" + elif self.is_type_hint_used_but_not_imported("Optional", file_content): + return False, "Type hint 'Optional' is used but not imported in the code." + else: + if self.compress_code_field: + file_content = str(StringCompressor(file_content).compress_string()) + return True, file_content + + def build_component_menu_list(self, file_paths): + """ + Build a list of menus with their components + from the .py files in the directory. 
+ """ + response = {"menu": []} + + for file_path in file_paths: + menu_name = os.path.basename(os.path.dirname(file_path)) + filename = os.path.basename(file_path) + validation_result, result_content = self.process_file(file_path) + + menu_result = self.find_menu(response, menu_name) or { + "name": menu_name, + "path": os.path.dirname(file_path), + "components": [], + } + + component_info = { + "name": filename.split(".")[0], + "file": filename, + "code": result_content if validation_result else "", + "error": "" if validation_result else result_content, + } + menu_result["components"].append(component_info) + + if menu_result not in response["menu"]: + response["menu"].append(menu_result) + + return response diff --git a/src/backend/langflow/interface/custom/schema.py b/src/backend/langflow/interface/custom/schema.py new file mode 100644 index 0000000000..80d65405f3 --- /dev/null +++ b/src/backend/langflow/interface/custom/schema.py @@ -0,0 +1,29 @@ +from pydantic import BaseModel, Field + + +from typing import Optional + + +class ClassCodeDetails(BaseModel): + """ + A dataclass for storing details about a class. + """ + + name: str + doc: Optional[str] + bases: list + attributes: list + methods: list + init: Optional[dict] = Field(default_factory=dict) + + +class CallableCodeDetails(BaseModel): + """ + A dataclass for storing details about a callable. 
+ """ + + name: str + doc: Optional[str] + args: list + body: list + return_type: Optional[str] diff --git a/src/backend/langflow/interface/importing/utils.py b/src/backend/langflow/interface/importing/utils.py index ccfd8d5dd6..0acb2cff50 100644 --- a/src/backend/langflow/interface/importing/utils.py +++ b/src/backend/langflow/interface/importing/utils.py @@ -9,6 +9,7 @@ from langchain.chains.base import Chain from langchain.chat_models.base import BaseChatModel from langchain.tools import BaseTool +from langflow.interface.custom.custom_component import CustomComponent from langflow.utils import validate from langflow.interface.wrappers.base import wrapper_creator @@ -47,6 +48,7 @@ def import_by_type(_type: str, name: str) -> Any: "utilities": import_utility, "output_parsers": import_output_parser, "retrievers": import_retriever, + "custom_components": import_custom_component, } if _type == "llms": key = "chat" if "chat" in name.lower() else "llm" @@ -57,6 +59,13 @@ def import_by_type(_type: str, name: str) -> Any: return loaded_func(name) +def import_custom_component(custom_component: str) -> CustomComponent: + """Import custom component from custom component name""" + return import_class( + f"langflow.interface.custom.custom_component.{custom_component}" + ) + + def import_output_parser(output_parser: str) -> Any: """Import output parser from output parser name""" return import_module(f"from langchain.output_parsers import {output_parser}") @@ -172,3 +181,8 @@ def get_function(code): function_name = validate.extract_function_name(code) return validate.create_function(code, function_name) + + +def get_function_custom(code): + class_name = validate.extract_class_name(code) + return validate.create_class(code, class_name) diff --git a/src/backend/langflow/interface/initialize/loading.py b/src/backend/langflow/interface/initialize/loading.py index b232d089ca..f6da0edc77 100644 --- a/src/backend/langflow/interface/initialize/loading.py +++ 
b/src/backend/langflow/interface/initialize/loading.py @@ -1,21 +1,23 @@ -import contextlib import json -from typing import Any, Callable, Dict, List, Sequence, Type +from typing import Any, Callable, Dict, Sequence, Type -from langchain.agents import ZeroShotAgent from langchain.agents import agent as agent_module from langchain.agents.agent import AgentExecutor from langchain.agents.agent_toolkits.base import BaseToolkit from langchain.agents.tools import BaseTool from langflow.interface.initialize.llm import initialize_vertexai +from langflow.interface.initialize.utils import handle_format_kwargs, handle_node_type from langflow.interface.initialize.vector_store import vecstore_initializer -from langchain.schema import Document, BaseOutputParser from pydantic import ValidationError +from langflow.interface.importing.utils import ( + get_function, + get_function_custom, + import_by_type, +) from langflow.interface.custom_lists import CUSTOM_NODES -from langflow.interface.importing.utils import get_function, import_by_type from langflow.interface.agents.base import agent_creator from langflow.interface.toolkits.base import toolkits_creator from langflow.interface.chains.base import chain_creator @@ -95,12 +97,21 @@ def instantiate_based_on_type(class_object, base_type, node_type, params): return instantiate_retriever(node_type, class_object, params) elif base_type == "memory": return instantiate_memory(node_type, class_object, params) + elif base_type == "custom_components": + return instantiate_custom_component(node_type, class_object, params) elif base_type == "wrappers": return instantiate_wrapper(node_type, class_object, params) else: return class_object(**params) +def instantiate_custom_component(node_type, class_object, params): + class_object = get_function_custom(params.pop("code")) + custom_component = class_object() + built_object = custom_component.build(**params) + return built_object, {"repr": custom_component.custom_repr()} + + def 
instantiate_wrapper(node_type, class_object, params): if node_type in wrapper_creator.from_method_nodes: method = wrapper_creator.from_method_nodes[node_type] @@ -199,68 +210,8 @@ def instantiate_agent(node_type, class_object: Type[agent_module.Agent], params: def instantiate_prompt(node_type, class_object, params: Dict): - if node_type == "ZeroShotPrompt": - if "tools" not in params: - params["tools"] = [] - return ZeroShotAgent.create_prompt(**params) - elif "MessagePromptTemplate" in node_type: - # Then we only need the template - from_template_params = { - "template": params.pop("prompt", params.pop("template", "")) - } - - if not from_template_params.get("template"): - raise ValueError("Prompt template is required") - prompt = class_object.from_template(**from_template_params) - - elif node_type == "ChatPromptTemplate": - prompt = class_object.from_messages(**params) - else: - prompt = class_object(**params) - - format_kwargs: Dict[str, Any] = {} - for input_variable in prompt.input_variables: - if input_variable in params: - variable = params[input_variable] - if isinstance(variable, str): - format_kwargs[input_variable] = variable - elif isinstance(variable, BaseOutputParser) and hasattr( - variable, "get_format_instructions" - ): - format_kwargs[input_variable] = variable.get_format_instructions() - elif isinstance(variable, List) and all( - isinstance(item, Document) for item in variable - ): - # Format document to contain page_content and metadata - # as one string separated by a newline - if len(variable) > 1: - content = "\n".join( - [item.page_content for item in variable if item.page_content] - ) - else: - content = variable[0].page_content - # content could be a json list of strings - with contextlib.suppress(json.JSONDecodeError): - content = json.loads(content) - if isinstance(content, list): - content = ",".join([str(item) for item in content]) - format_kwargs[input_variable] = content - # handle_keys will be a list but it does not exist yet - # 
so we need to create it - - if ( - isinstance(variable, List) - and all(isinstance(item, Document) for item in variable) - ) or ( - isinstance(variable, BaseOutputParser) - and hasattr(variable, "get_format_instructions") - ): - if "handle_keys" not in format_kwargs: - format_kwargs["handle_keys"] = [] - - # Add the handle_keys to the list - format_kwargs["handle_keys"].append(input_variable) - + params, prompt = handle_node_type(node_type, class_object, params) + format_kwargs = handle_format_kwargs(prompt, params) return prompt, format_kwargs @@ -363,6 +314,8 @@ def instantiate_textsplitter( ): try: documents = params.pop("documents") + if not isinstance(documents, list): + documents = [documents] except KeyError as exc: raise ValueError( "The source you provided did not load correctly or was empty." diff --git a/src/backend/langflow/interface/initialize/utils.py b/src/backend/langflow/interface/initialize/utils.py new file mode 100644 index 0000000000..31fbc6d8b5 --- /dev/null +++ b/src/backend/langflow/interface/initialize/utils.py @@ -0,0 +1,103 @@ +import contextlib +import json +from typing import Any, Dict, List + +from langchain.agents import ZeroShotAgent + + +from langchain.schema import Document, BaseOutputParser + + +def handle_node_type(node_type, class_object, params: Dict): + if node_type == "ZeroShotPrompt": + params = check_tools_in_params(params) + prompt = ZeroShotAgent.create_prompt(**params) + elif "MessagePromptTemplate" in node_type: + prompt = instantiate_from_template(class_object, params) + elif node_type == "ChatPromptTemplate": + prompt = class_object.from_messages(**params) + else: + prompt = class_object(**params) + return params, prompt + + +def check_tools_in_params(params: Dict): + if "tools" not in params: + params["tools"] = [] + return params + + +def instantiate_from_template(class_object, params: Dict): + from_template_params = { + "template": params.pop("prompt", params.pop("template", "")) + } + if not 
from_template_params.get("template"): + raise ValueError("Prompt template is required") + return class_object.from_template(**from_template_params) + + +def handle_format_kwargs(prompt, params: Dict): + format_kwargs: Dict[str, Any] = {} + for input_variable in prompt.input_variables: + if input_variable in params: + format_kwargs = handle_variable(params, input_variable, format_kwargs) + return format_kwargs + + +def handle_variable(params: Dict, input_variable: str, format_kwargs: Dict): + variable = params[input_variable] + if isinstance(variable, str): + format_kwargs[input_variable] = variable + elif isinstance(variable, BaseOutputParser) and hasattr( + variable, "get_format_instructions" + ): + format_kwargs[input_variable] = variable.get_format_instructions() + elif is_instance_of_list_or_document(variable): + format_kwargs = format_document(variable, input_variable, format_kwargs) + if needs_handle_keys(variable): + format_kwargs = add_handle_keys(input_variable, format_kwargs) + return format_kwargs + + +def is_instance_of_list_or_document(variable): + return ( + isinstance(variable, List) + and all(isinstance(item, Document) for item in variable) + or isinstance(variable, Document) + ) + + +def format_document(variable, input_variable: str, format_kwargs: Dict): + variable = variable if isinstance(variable, List) else [variable] + content = format_content(variable) + format_kwargs[input_variable] = content + return format_kwargs + + +def format_content(variable): + if len(variable) > 1: + return "\n".join([item.page_content for item in variable if item.page_content]) + content = variable[0].page_content + return try_to_load_json(content) + + +def try_to_load_json(content): + with contextlib.suppress(json.JSONDecodeError): + content = json.loads(content) + if isinstance(content, list): + content = ",".join([str(item) for item in content]) + return content + + +def needs_handle_keys(variable): + return is_instance_of_list_or_document(variable) or ( + 
isinstance(variable, BaseOutputParser) + and hasattr(variable, "get_format_instructions") + ) + + +def add_handle_keys(input_variable: str, format_kwargs: Dict): + if "handle_keys" not in format_kwargs: + format_kwargs["handle_keys"] = [] + format_kwargs["handle_keys"].append(input_variable) + return format_kwargs diff --git a/src/backend/langflow/interface/listing.py b/src/backend/langflow/interface/listing.py index 0893f855a2..fe3090f658 100644 --- a/src/backend/langflow/interface/listing.py +++ b/src/backend/langflow/interface/listing.py @@ -13,6 +13,7 @@ from langflow.interface.wrappers.base import wrapper_creator from langflow.interface.output_parsers.base import output_parser_creator from langflow.interface.retrievers.base import retriever_creator +from langflow.interface.custom.base import custom_component_creator def get_type_dict(): @@ -32,6 +33,7 @@ def get_type_dict(): "utilities": utility_creator.to_list(), "outputParsers": output_parser_creator.to_list(), "retrievers": retriever_creator.to_list(), + "custom_components": custom_component_creator.to_list(), } diff --git a/src/backend/langflow/interface/tools/base.py b/src/backend/langflow/interface/tools/base.py index 027224a3ae..f8965134d7 100644 --- a/src/backend/langflow/interface/tools/base.py +++ b/src/backend/langflow/interface/tools/base.py @@ -55,7 +55,7 @@ show=True, value="", suffixes=[".json", ".yaml", ".yml"], - fileTypes=["json", "yaml", "yml"], + file_types=["json", "yaml", "yml"], ), } diff --git a/src/backend/langflow/interface/tools/constants.py b/src/backend/langflow/interface/tools/constants.py index fea3c5237e..dc1bfe0c1c 100644 --- a/src/backend/langflow/interface/tools/constants.py +++ b/src/backend/langflow/interface/tools/constants.py @@ -9,7 +9,10 @@ from langchain.tools.json.tool import JsonSpec from langflow.interface.importing.utils import import_class -from langflow.interface.tools.custom import PythonFunctionTool, PythonFunction +from langflow.interface.tools.custom import ( 
+ PythonFunctionTool, + PythonFunction, +) FILE_TOOLS = {"JsonSpec": JsonSpec} CUSTOM_TOOLS = { diff --git a/src/backend/langflow/interface/tools/custom.py b/src/backend/langflow/interface/tools/custom.py index 0e2e5ff571..321298e347 100644 --- a/src/backend/langflow/interface/tools/custom.py +++ b/src/backend/langflow/interface/tools/custom.py @@ -34,8 +34,6 @@ def get_function(self): class PythonFunctionTool(Function, Tool): - """Python function""" - name: str = "Custom Tool" description: str code: str @@ -49,6 +47,4 @@ def ___init__(self, name: str, description: str, code: str): class PythonFunction(Function): - """Python function""" - code: str diff --git a/src/backend/langflow/interface/types.py b/src/backend/langflow/interface/types.py index 72ecb67750..1420479a4d 100644 --- a/src/backend/langflow/interface/types.py +++ b/src/backend/langflow/interface/types.py @@ -1,7 +1,9 @@ from langflow.interface.agents.base import agent_creator from langflow.interface.chains.base import chain_creator +from langflow.interface.custom.constants import CUSTOM_COMPONENT_SUPPORTED_TYPES from langflow.interface.document_loaders.base import documentloader_creator from langflow.interface.embeddings.base import embedding_creator +from langflow.interface.importing.utils import get_function_custom from langflow.interface.llms.base import llm_creator from langflow.interface.memories.base import memory_creator from langflow.interface.prompts.base import prompt_creator @@ -12,9 +14,28 @@ from langflow.interface.vector_store.base import vectorstore_creator from langflow.interface.wrappers.base import wrapper_creator from langflow.interface.output_parsers.base import output_parser_creator +from langflow.interface.custom.base import custom_component_creator +from langflow.interface.custom.custom_component import CustomComponent + +from langflow.template.field.base import TemplateField +from langflow.template.frontend_node.constants import CLASSES_TO_REMOVE +from 
langflow.template.frontend_node.custom_components import ( + CustomComponentFrontendNode, +) from langflow.interface.retrievers.base import retriever_creator +from langflow.interface.custom.directory_reader import DirectoryReader +from langflow.utils.logger import logger +from langflow.utils.util import get_base_classes +from langflow.api.utils import merge_nested_dicts + +import re +import warnings +import traceback +from fastapi import HTTPException + +# Used to get the base_classes list def get_type_list(): """Get a list of all langchain types""" all_types = build_langchain_types_dict() @@ -29,7 +50,6 @@ def get_type_list(): def build_langchain_types_dict(): # sourcery skip: dict-assign-update-to-union """Build a dictionary of all langchain types""" - all_types = {} creators = [ @@ -48,6 +68,7 @@ def build_langchain_types_dict(): # sourcery skip: dict-assign-update-to-union utility_creator, output_parser_creator, retriever_creator, + custom_component_creator, ] all_types = {} @@ -55,7 +76,315 @@ def build_langchain_types_dict(): # sourcery skip: dict-assign-update-to-union created_types = creator.to_dict() if created_types[creator.type_name].values(): all_types.update(created_types) + return all_types -langchain_types_dict = build_langchain_types_dict() +def process_type(field_type: str): + return "prompt" if field_type == "Prompt" else field_type + + +# TODO: Move to correct place +def add_new_custom_field( + template, + field_name: str, + field_type: str, + field_value: str, + field_required: bool, + field_config: dict, +): + # Check field_config if any of the keys are in it + # if it is, update the value + display_name = field_config.pop("display_name", field_name) + field_type = field_config.pop("field_type", field_type) + field_type = process_type(field_type) + field_value = field_config.pop("value", field_value) + field_advanced = field_config.pop("advanced", False) + + if field_type == "bool" and field_value is None: + field_value = False + + # If options 
is a list, then it's a dropdown + # If options is None, then it's a list of strings + is_list = isinstance(field_config.get("options"), list) + field_config["is_list"] = is_list or field_config.get("is_list", False) + + if "name" in field_config: + warnings.warn( + "The 'name' key in field_config is used to build the object and can't be changed." + ) + field_config.pop("name", None) + + required = field_config.pop("required", field_required) + placeholder = field_config.pop("placeholder", "") + + new_field = TemplateField( + name=field_name, + field_type=field_type, + value=field_value, + show=True, + required=required, + advanced=field_advanced, + placeholder=placeholder, + display_name=display_name, + **field_config, + ) + template.get("template")[field_name] = new_field.to_dict() + template.get("custom_fields")[field_name] = None + + return template + + +# TODO: Move to correct place +def add_code_field(template, raw_code, field_config): + # Field with the Python code to allow update + + code_field = { + "code": { + "dynamic": True, + "required": True, + "placeholder": "", + "show": True, + "multiline": True, + "value": raw_code, + "password": False, + "name": "code", + "advanced": field_config.pop("advanced", False), + "type": "code", + "list": False, + } + } + template.get("template")["code"] = code_field.get("code") + + return template + + +def extract_type_from_optional(field_type): + """ + Extract the type from a string formatted as "Optional[]". + + Parameters: + field_type (str): The string from which to extract the type. + + Returns: + str: The extracted type, or an empty string if no type was found. 
+ """ + match = re.search(r"\[(.*?)\]", field_type) + return match[1] if match else None + + +def build_frontend_node(custom_component: CustomComponent): + """Build a frontend node for a custom component""" + try: + return ( + CustomComponentFrontendNode().to_dict().get(type(custom_component).__name__) + ) + + except Exception as exc: + logger.error(f"Error while building base frontend node: {exc}") + return None + + +def update_display_name_and_description(frontend_node, template_config): + """Update the display name and description of a frontend node""" + if "display_name" in template_config: + frontend_node["display_name"] = template_config["display_name"] + + if "description" in template_config: + frontend_node["description"] = template_config["description"] + + +def build_field_config(custom_component: CustomComponent): + """Build the field configuration for a custom component""" + + try: + custom_class = get_function_custom(custom_component.code) + except Exception as exc: + logger.error(f"Error while getting custom function: {str(exc)}") + return {} + + try: + return custom_class().build_config() + except Exception as exc: + logger.error(f"Error while building field config: {str(exc)}") + return {} + + +def add_extra_fields(frontend_node, field_config, function_args): + """Add extra fields to the frontend node""" + if function_args is None or function_args == "": + return + + # sort function_args which is a list of dicts + function_args.sort(key=lambda x: x["name"]) + + for extra_field in function_args: + if "name" not in extra_field or extra_field["name"] == "self": + continue + + field_name, field_type, field_value, field_required = get_field_properties( + extra_field + ) + config = field_config.get(field_name, {}) + frontend_node = add_new_custom_field( + frontend_node, + field_name, + field_type, + field_value, + field_required, + config, + ) + + +def get_field_properties(extra_field): + """Get the properties of an extra field""" + field_name = 
extra_field["name"] + field_type = extra_field.get("type", "str") + field_value = extra_field.get("default", "") + field_required = "optional" not in field_type.lower() + + if not field_required: + field_type = extract_type_from_optional(field_type) + + return field_name, field_type, field_value, field_required + + +def add_base_classes(frontend_node, return_type): + """Add base classes to the frontend node""" + if return_type not in CUSTOM_COMPONENT_SUPPORTED_TYPES or return_type is None: + raise HTTPException( + status_code=400, + detail={ + "error": ( + "Invalid return type should be one of: " + f"{list(CUSTOM_COMPONENT_SUPPORTED_TYPES.keys())}" + ), + "traceback": traceback.format_exc(), + }, + ) + + return_type_instance = CUSTOM_COMPONENT_SUPPORTED_TYPES.get(return_type) + base_classes = get_base_classes(return_type_instance) + + for base_class in base_classes: + if base_class not in CLASSES_TO_REMOVE: + frontend_node.get("base_classes").append(base_class) + + +def build_langchain_template_custom_component(custom_component: CustomComponent): + """Build a custom component template for the langchain""" + frontend_node = build_frontend_node(custom_component) + + if frontend_node is None: + return None + + template_config = custom_component.build_template_config + + update_display_name_and_description(frontend_node, template_config) + + field_config = build_field_config(custom_component) + add_extra_fields( + frontend_node, field_config, custom_component.get_function_entrypoint_args + ) + + frontend_node = add_code_field( + frontend_node, custom_component.code, field_config.get("code", {}) + ) + + add_base_classes( + frontend_node, custom_component.get_function_entrypoint_return_type + ) + + return frontend_node + + +def load_files_from_path(path: str): + """Load all files from a given path""" + reader = DirectoryReader(path, False) + + return reader.get_files() + + +def build_and_validate_all_files(reader, file_list): + """Build and validate all files""" + data = 
reader.build_component_menu_list(file_list) + + valid_components = reader.filter_loaded_components(data=data, with_errors=False) + invalid_components = reader.filter_loaded_components(data=data, with_errors=True) + + return valid_components, invalid_components + + +def build_valid_menu(valid_components): + """Build the valid menu""" + valid_menu = {} + for menu_item in valid_components["menu"]: + menu_name = menu_item["name"] + valid_menu[menu_name] = {} + + for component in menu_item["components"]: + try: + component_name = component["name"] + component_code = component["code"] + + component_extractor = CustomComponent(code=component_code) + component_extractor.is_check_valid() + component_template = build_langchain_template_custom_component( + component_extractor + ) + + valid_menu[menu_name][component_name] = component_template + + except Exception as exc: + logger.error(f"Error while building custom component: {exc}") + + return valid_menu + + +def build_invalid_menu(invalid_components): + """Build the invalid menu""" + invalid_menu = {} + for menu_item in invalid_components["menu"]: + menu_name = menu_item["name"] + invalid_menu[menu_name] = {} + + for component in menu_item["components"]: + try: + component_name = component["name"] + component_code = component["code"] + + component_template = ( + CustomComponentFrontendNode( + description="ERROR - Check your Python Code", + display_name=f"ERROR - {component_name}", + ) + .to_dict() + .get(type(CustomComponent()).__name__) + ) + + component_template["error"] = component.get("error", None) + component_template.get("template").get("code")["value"] = component_code + + invalid_menu[menu_name][component_name] = component_template + + except Exception as exc: + logger.error( + f"Error while creating custom component [{component_name}]: {str(exc)}" + ) + + return invalid_menu + + +def build_langchain_custom_component_list_from_path(path: str): + """Build a list of custom components for the langchain from a given 
path""" + file_list = load_files_from_path(path) + reader = DirectoryReader(path, False) + + valid_components, invalid_components = build_and_validate_all_files( + reader, file_list + ) + + valid_menu = build_valid_menu(valid_components) + invalid_menu = build_invalid_menu(invalid_components) + + return merge_nested_dicts(valid_menu, invalid_menu) diff --git a/src/backend/langflow/main.py b/src/backend/langflow/main.py index 31878f851f..43a63d5649 100644 --- a/src/backend/langflow/main.py +++ b/src/backend/langflow/main.py @@ -8,10 +8,12 @@ from langflow.api import router from langflow.database.base import create_db_and_tables from langflow.interface.utils import setup_llm_caching +from langflow.utils.logger import configure def create_app(): """Create the FastAPI app and include the router.""" + configure() app = FastAPI() @@ -78,10 +80,16 @@ def setup_app(static_files_dir: Optional[Path] = None) -> FastAPI: return app -app = create_app() - - if __name__ == "__main__": import uvicorn - - uvicorn.run(app, host="127.0.0.1", port=7860) + from langflow.utils.util import get_number_of_workers + + configure() + uvicorn.run( + create_app, + host="127.0.0.1", + port=7860, + workers=get_number_of_workers(), + log_level="debug", + reload=True, + ) diff --git a/src/backend/langflow/processing/base.py b/src/backend/langflow/processing/base.py index f8690bbdfe..f1d7b6e568 100644 --- a/src/backend/langflow/processing/base.py +++ b/src/backend/langflow/processing/base.py @@ -22,7 +22,7 @@ async def get_result_and_steps(langchain_object, inputs: Union[dict, str], **kwa try: fix_memory_inputs(langchain_object) except Exception as exc: - logger.error(exc) + logger.error(f"Error fixing memory inputs: {exc}") try: async_callbacks = [AsyncStreamingLLMCallbackHandler(**kwargs)] diff --git a/src/backend/langflow/processing/process.py b/src/backend/langflow/processing/process.py index 03e6e4c352..8cefb1f44b 100644 --- a/src/backend/langflow/processing/process.py +++ 
b/src/backend/langflow/processing/process.py @@ -85,12 +85,17 @@ def get_input_str_if_only_one_input(inputs: dict) -> Optional[str]: return list(inputs.values())[0] if len(inputs) == 1 else None -def process_graph_cached(data_graph: Dict[str, Any], inputs: Optional[dict] = None): +def process_graph_cached( + data_graph: Dict[str, Any], inputs: Optional[dict] = None, clear_cache=False +): """ Process graph by extracting input variables and replacing ZeroShotPrompt with PromptTemplate,then run the graph and return the result and thought. """ # Load langchain object + if clear_cache: + build_sorted_vertices_with_caching.clear_cache() + logger.debug("Cleared cache") langchain_object, artifacts = build_sorted_vertices_with_caching(data_graph) logger.debug("Loaded LangChain object") if inputs is None: diff --git a/src/backend/langflow/settings.py b/src/backend/langflow/settings.py index 9e6c600829..e9c62d6f7c 100644 --- a/src/backend/langflow/settings.py +++ b/src/backend/langflow/settings.py @@ -1,10 +1,13 @@ import os -from typing import Optional +from typing import Optional, List +from pathlib import Path import yaml from pydantic import BaseSettings, root_validator from langflow.utils.logger import logger +BASE_COMPONENTS_PATH = Path(__file__).parent / "components" + class Settings(BaseSettings): chains: dict = {} @@ -22,13 +25,16 @@ class Settings(BaseSettings): textsplitters: dict = {} utilities: dict = {} output_parsers: dict = {} + custom_components: dict = {} + dev: bool = False database_url: Optional[str] = None cache: str = "InMemoryCache" remove_api_keys: bool = False + components_path: List[Path] @root_validator(pre=True) - def set_database_url(cls, values): + def set_env_variables(cls, values): if "database_url" not in values: logger.debug( "No database_url provided, trying LANGFLOW_DATABASE_URL env variable" @@ -38,6 +44,19 @@ def set_database_url(cls, values): else: logger.debug("No DATABASE_URL env variable, using sqlite database") values["database_url"] 
= "sqlite:///./langflow.db" + + if not values.get("components_path"): + values["components_path"] = [BASE_COMPONENTS_PATH] + elif BASE_COMPONENTS_PATH not in values["components_path"]: + values["components_path"].append(BASE_COMPONENTS_PATH) + + if os.getenv("LANGFLOW_COMPONENT_PATH"): + langflow_component_path = Path(os.getenv("LANGFLOW_COMPONENT_PATH")) + if ( + langflow_component_path.exists() + and langflow_component_path not in values["components_path"] + ): + values["components_path"].append(langflow_component_path) return values class Config: @@ -68,12 +87,20 @@ def update_from_yaml(self, file_path: str, dev: bool = False): self.documentloaders = new_settings.documentloaders or {} self.retrievers = new_settings.retrievers or {} self.output_parsers = new_settings.output_parsers or {} + self.custom_components = new_settings.custom_components or {} + self.components_path = new_settings.components_path or [] self.dev = dev def update_settings(self, **kwargs): for key, value in kwargs.items(): if hasattr(self, key): - setattr(self, key, value) + if isinstance(getattr(self, key), list): + if isinstance(value, list): + getattr(self, key).extend(value) + else: + getattr(self, key).append(value) + else: + setattr(self, key, value) def save_settings_to_yaml(settings: Settings, file_path: str): diff --git a/src/backend/langflow/template/field/base.py b/src/backend/langflow/template/field/base.py index a747ad3225..31c68d0940 100644 --- a/src/backend/langflow/template/field/base.py +++ b/src/backend/langflow/template/field/base.py @@ -6,23 +6,58 @@ class TemplateFieldCreator(BaseModel, ABC): field_type: str = "str" + """The type of field this is. Default is a string.""" + required: bool = False + """Specifies if the field is required. Defaults to False.""" + placeholder: str = "" + """A placeholder string for the field. Default is an empty string.""" + is_list: bool = False + """Defines if the field is a list. 
Default is False.""" + show: bool = True + """Should the field be shown. Defaults to True.""" + multiline: bool = False + """Defines if the field will allow the user to open a text editor. Default is False.""" + value: Any = None + """The value of the field. Default is None.""" + suffixes: list[str] = [] - fileTypes: list[str] = [] + """List of suffixes for a file field. Default is an empty list.""" + file_types: list[str] = [] + """List of file types associated with the field. Default is an empty list. (duplicate)""" + file_path: Union[str, None] = None + """The file path of the field if it is a file. Defaults to None.""" + password: bool = False + """Specifies if the field is a password. Defaults to False.""" + options: list[str] = [] + """List of options for the field. Only used when is_list=True. Default is an empty list.""" + name: str = "" + """Name of the field. Default is an empty string.""" + display_name: Optional[str] = None + """Display name of the field. Defaults to None.""" + advanced: bool = False + """Specifies if the field will an advanced parameter (hidden). Defaults to False.""" + input_types: list[str] = [] + """List of input types for the handle when the field has more than one type. Default is an empty list.""" + + dynamic: bool = False + """Specifies if the field is dynamic. Defaults to False.""" + info: Optional[str] = "" + """Additional information about the field to be shown in the tooltip. 
Defaults to an empty string.""" def to_dict(self): result = self.dict() diff --git a/src/backend/langflow/template/frontend_node/__init__.py b/src/backend/langflow/template/frontend_node/__init__.py index c362343640..e13aa1ded1 100644 --- a/src/backend/langflow/template/frontend_node/__init__.py +++ b/src/backend/langflow/template/frontend_node/__init__.py @@ -9,6 +9,7 @@ vectorstores, documentloaders, textsplitters, + custom_components, ) __all__ = [ @@ -22,4 +23,5 @@ "vectorstores", "documentloaders", "textsplitters", + "custom_components", ] diff --git a/src/backend/langflow/template/frontend_node/agents.py b/src/backend/langflow/template/frontend_node/agents.py index 02aea78b99..63c8a4d5ec 100644 --- a/src/backend/langflow/template/frontend_node/agents.py +++ b/src/backend/langflow/template/frontend_node/agents.py @@ -145,7 +145,7 @@ class CSVAgentNode(FrontendNode): name="path", value="", suffixes=[".csv"], - fileTypes=["csv"], + file_types=["csv"], ), TemplateField( field_type="BaseLanguageModel", diff --git a/src/backend/langflow/template/frontend_node/base.py b/src/backend/langflow/template/frontend_node/base.py index 7dae454635..fe19b56529 100644 --- a/src/backend/langflow/template/frontend_node/base.py +++ b/src/backend/langflow/template/frontend_node/base.py @@ -5,13 +5,14 @@ from pydantic import BaseModel, Field from langflow.template.frontend_node.formatter import field_formatters -from langflow.template.frontend_node.constants import FORCE_SHOW_FIELDS +from langflow.template.frontend_node.constants import ( + CLASSES_TO_REMOVE, + FORCE_SHOW_FIELDS, +) from langflow.template.field.base import TemplateField from langflow.template.template.base import Template from langflow.utils import constants -CLASSES_TO_REMOVE = ["Serializable", "BaseModel", "object"] - class FieldFormatters(BaseModel): formatters = { @@ -51,14 +52,8 @@ class FrontendNode(BaseModel): custom_fields: defaultdict = defaultdict(list) output_types: List[str] = [] field_formatters: 
FieldFormatters = Field(default_factory=FieldFormatters) - - def process_base_classes(self) -> None: - """Removes unwanted base classes from the list of base classes.""" - self.base_classes = [ - base_class - for base_class in self.base_classes - if base_class not in CLASSES_TO_REMOVE - ] + beta: bool = False + error: Optional[str] = None # field formatters is an instance attribute but it is not used in the class # so we need to create a method to get it @@ -70,6 +65,14 @@ def set_documentation(self, documentation: str) -> None: """Sets the documentation of the frontend node.""" self.documentation = documentation + def process_base_classes(self) -> None: + """Removes unwanted base classes from the list of base classes.""" + self.base_classes = [ + base_class + for base_class in self.base_classes + if base_class not in CLASSES_TO_REMOVE + ] + def to_dict(self) -> dict: """Returns a dict representation of the frontend node.""" self.process_base_classes() @@ -82,6 +85,8 @@ def to_dict(self) -> dict: "custom_fields": self.custom_fields, "output_types": self.output_types, "documentation": self.documentation, + "beta": self.beta, + "error": self.error, }, } diff --git a/src/backend/langflow/template/frontend_node/constants.py b/src/backend/langflow/template/frontend_node/constants.py index 513ccd1ef9..3cf5dfffdc 100644 --- a/src/backend/langflow/template/frontend_node/constants.py +++ b/src/backend/langflow/template/frontend_node/constants.py @@ -63,3 +63,6 @@ INPUT_KEY_INFO = """The variable to be used as Chat Input when more than one variable is available.""" OUTPUT_KEY_INFO = """The variable to be used as Chat Output (e.g. 
answer in a ConversationalRetrievalChain)""" + + +CLASSES_TO_REMOVE = ["Serializable", "BaseModel", "object"] diff --git a/src/backend/langflow/template/frontend_node/custom_components.py b/src/backend/langflow/template/frontend_node/custom_components.py new file mode 100644 index 0000000000..4f36a1c9fb --- /dev/null +++ b/src/backend/langflow/template/frontend_node/custom_components.py @@ -0,0 +1,31 @@ +from langflow.template.field.base import TemplateField +from langflow.template.frontend_node.base import FrontendNode +from langflow.template.template.base import Template +from langflow.interface.custom.constants import DEFAULT_CUSTOM_COMPONENT_CODE + + +class CustomComponentFrontendNode(FrontendNode): + name: str = "CustomComponent" + display_name: str = "Custom Component" + beta: bool = True + template: Template = Template( + type_name="CustomComponent", + fields=[ + TemplateField( + field_type="code", + required=True, + placeholder="", + is_list=False, + show=True, + value=DEFAULT_CUSTOM_COMPONENT_CODE, + name="code", + advanced=False, + dynamic=True, + ) + ], + ) + description: str = "Create any custom component you want!" 
+ base_classes: list[str] = [] + + def to_dict(self): + return super().to_dict() diff --git a/src/backend/langflow/template/frontend_node/documentloaders.py b/src/backend/langflow/template/frontend_node/documentloaders.py index d775d87365..bb78d8855c 100644 --- a/src/backend/langflow/template/frontend_node/documentloaders.py +++ b/src/backend/langflow/template/frontend_node/documentloaders.py @@ -14,7 +14,7 @@ def build_file_field( name=name, value="", suffixes=suffixes, - fileTypes=fileTypes, + file_types=fileTypes, ) diff --git a/src/backend/langflow/template/frontend_node/llms.py b/src/backend/langflow/template/frontend_node/llms.py index de0fa3c0b0..a6a128cfed 100644 --- a/src/backend/langflow/template/frontend_node/llms.py +++ b/src/backend/langflow/template/frontend_node/llms.py @@ -19,7 +19,7 @@ def add_extra_fields(self) -> None: name="credentials", value="", suffixes=[".json"], - fileTypes=["json"], + file_types=["json"], ) ) diff --git a/src/backend/langflow/template/frontend_node/tools.py b/src/backend/langflow/template/frontend_node/tools.py index ece765ed78..579b32da3d 100644 --- a/src/backend/langflow/template/frontend_node/tools.py +++ b/src/backend/langflow/template/frontend_node/tools.py @@ -1,7 +1,9 @@ from langflow.template.field.base import TemplateField from langflow.template.frontend_node.base import FrontendNode from langflow.template.template.base import Template -from langflow.utils.constants import DEFAULT_PYTHON_FUNCTION +from langflow.utils.constants import ( + DEFAULT_PYTHON_FUNCTION, +) class ToolNode(FrontendNode): diff --git a/src/backend/langflow/utils/constants.py b/src/backend/langflow/utils/constants.py index 44103c2b7a..e473d855b7 100644 --- a/src/backend/langflow/utils/constants.py +++ b/src/backend/langflow/utils/constants.py @@ -17,18 +17,29 @@ ] ANTHROPIC_MODELS = [ - "claude-v1", # largest model, ideal for a wide range of more complex tasks. 
- "claude-v1-100k", # An enhanced version of claude-v1 with a 100,000 token (roughly 75,000 word) context window. - "claude-instant-v1", # A smaller model with far lower latency, sampling at roughly 40 words/sec! - "claude-instant-v1-100k", # Like claude-instant-v1 with a 100,000 token context window but retains its performance. + # largest model, ideal for a wide range of more complex tasks. + "claude-v1", + # An enhanced version of claude-v1 with a 100,000 token (roughly 75,000 word) context window. + "claude-v1-100k", + # A smaller model with far lower latency, sampling at roughly 40 words/sec! + "claude-instant-v1", + # Like claude-instant-v1 with a 100,000 token context window but retains its performance. + "claude-instant-v1-100k", # Specific sub-versions of the above models: - "claude-v1.3", # Vs claude-v1.2: better instruction-following, code, and non-English dialogue and writing. - "claude-v1.3-100k", # An enhanced version of claude-v1.3 with a 100,000 token (roughly 75,000 word) context window. - "claude-v1.2", # Vs claude-v1.1: small adv in general helpfulness, instruction following, coding, and other tasks. - "claude-v1.0", # An earlier version of claude-v1. - "claude-instant-v1.1", # Latest version of claude-instant-v1. Better than claude-instant-v1.0 at most tasks. - "claude-instant-v1.1-100k", # Version of claude-instant-v1.1 with a 100K token context window. - "claude-instant-v1.0", # An earlier version of claude-instant-v1. + # Vs claude-v1.2: better instruction-following, code, and non-English dialogue and writing. + "claude-v1.3", + # An enhanced version of claude-v1.3 with a 100,000 token (roughly 75,000 word) context window. + "claude-v1.3-100k", + # Vs claude-v1.1: small adv in general helpfulness, instruction following, coding, and other tasks. + "claude-v1.2", + # An earlier version of claude-v1. + "claude-v1.0", + # Latest version of claude-instant-v1. Better than claude-instant-v1.0 at most tasks. 
+ "claude-instant-v1.1", + # Version of claude-instant-v1.1 with a 100K token context window. + "claude-instant-v1.1-100k", + # An earlier version of claude-instant-v1. + "claude-instant-v1.0", ] DEFAULT_PYTHON_FUNCTION = """ @@ -36,4 +47,5 @@ def python_function(text: str) -> str: \"\"\"This is a default python function that returns the input text\"\"\" return text """ + DIRECT_TYPES = ["str", "bool", "code", "int", "float", "Any", "prompt"] diff --git a/src/backend/langflow/utils/logger.py b/src/backend/langflow/utils/logger.py index b70a451d43..deb0f75ca7 100644 --- a/src/backend/langflow/utils/logger.py +++ b/src/backend/langflow/utils/logger.py @@ -6,7 +6,7 @@ logger = logging.getLogger("langflow") -def configure(log_level: str = "INFO", log_file: Path = None): # type: ignore +def configure(log_level: str = "DEBUG", log_file: Path = None): # type: ignore log_format = "%(asctime)s - %(levelname)s - %(message)s" log_level_value = getattr(logging, log_level.upper(), logging.INFO) diff --git a/src/backend/langflow/utils/types.py b/src/backend/langflow/utils/types.py new file mode 100644 index 0000000000..3657d550e7 --- /dev/null +++ b/src/backend/langflow/utils/types.py @@ -0,0 +1,2 @@ +class Prompt: + pass diff --git a/src/backend/langflow/utils/util.py b/src/backend/langflow/utils/util.py index c5db6052e8..f68c9dbe29 100644 --- a/src/backend/langflow/utils/util.py +++ b/src/backend/langflow/utils/util.py @@ -1,13 +1,15 @@ -import importlib -import inspect import re +import inspect +import importlib from functools import wraps -from typing import Dict, Optional +from typing import Optional, Dict, Any, Union from docstring_parser import parse # type: ignore from langflow.template.frontend_node.constants import FORCE_SHOW_FIELDS from langflow.utils import constants +from langflow.utils.logger import logger +from multiprocess import cpu_count # type: ignore def build_template_from_function( @@ -214,138 +216,249 @@ def get_default_factory(module: str, function: str): 
return None -def format_dict(d, name: Optional[str] = None): +def update_verbose(d: dict, new_value: bool) -> dict: """ - Formats a dictionary by removing certain keys and modifying the - values of other keys. + Recursively updates the value of the 'verbose' key in a dictionary. Args: - d: the dictionary to format - name: the name of the class to format + d: the dictionary to update + new_value: the new value to set + + Returns: + The updated dictionary. + """ + + for k, v in d.items(): + if isinstance(v, dict): + update_verbose(v, new_value) + elif k == "verbose": + d[k] = new_value + return d + + +def sync_to_async(func): + """ + Decorator to convert a sync function to an async function. + """ + + @wraps(func) + async def async_wrapper(*args, **kwargs): + return func(*args, **kwargs) + + return async_wrapper + + +def format_dict( + dictionary: Dict[str, Any], class_name: Optional[str] = None +) -> Dict[str, Any]: + """ + Formats a dictionary by removing certain keys and modifying the + values of other keys. Returns: A new dictionary with the desired modifications applied. 
""" - # Process remaining keys - for key, value in d.items(): + for key, value in dictionary.items(): if key == "_type": continue - _type = value["type"] + _type: Union[str, type] = get_type(value) - if not isinstance(_type, str): - _type = _type.__name__ + _type = remove_optional_wrapper(_type) + _type = check_list_type(_type, value) + _type = replace_mapping_with_dict(_type) - # Remove 'Optional' wrapper - if "Optional" in _type: - _type = _type.replace("Optional[", "")[:-1] + value["type"] = get_formatted_type(key, _type) + value["show"] = should_show_field(value, key) + value["password"] = is_password_field(key) + value["multiline"] = is_multiline_field(key) - # Check for list type - if "List" in _type or "Sequence" in _type or "Set" in _type: - _type = ( - _type.replace("List[", "") - .replace("Sequence[", "") - .replace("Set[", "")[:-1] - ) - value["list"] = True - else: - value["list"] = False + replace_dict_type_with_code(value) - # Replace 'Mapping' with 'dict' - if "Mapping" in _type: - _type = _type.replace("Mapping", "dict") + if key == "dict_": + set_dict_file_attributes(value) + + replace_default_value_with_actual(value) - # Change type from str to Tool - value["type"] = "Tool" if key in ["allowed_tools"] else _type + if key == "headers": + set_headers_value(value) - value["type"] = "int" if key in ["max_value_length"] else value["type"] + add_options_to_field(value, class_name, key) - # Show or not field - value["show"] = bool( - (value["required"] and key not in ["input_variables"]) - or key in FORCE_SHOW_FIELDS - or "api_key" in key - ) + return dictionary + + +def get_type(value: Any) -> Union[str, type]: + """ + Retrieves the type value from the dictionary. + + Returns: + The type value. + """ + _type = value["type"] + + return _type if isinstance(_type, str) else _type.__name__ + + +def remove_optional_wrapper(_type: Union[str, type]) -> str: + """ + Removes the 'Optional' wrapper from the type string. 
+ + Returns: + The type string with the 'Optional' wrapper removed. + """ + if isinstance(_type, type): + _type = str(_type) + if "Optional" in _type: + _type = _type.replace("Optional[", "")[:-1] + + return _type + + +def check_list_type(_type: str, value: Dict[str, Any]) -> str: + """ + Checks if the type is a list type and modifies the value accordingly. - # Add password field - value["password"] = any( - text in key.lower() for text in ["password", "token", "api", "key"] + Returns: + The modified type string. + """ + if any(list_type in _type for list_type in ["List", "Sequence", "Set"]): + _type = ( + _type.replace("List[", "").replace("Sequence[", "").replace("Set[", "")[:-1] ) + value["list"] = True + else: + value["list"] = False - # Add multline - value["multiline"] = key in [ - "suffix", - "prefix", - "template", - "examples", - "code", - "headers", - "format_instructions", - ] - - # Replace dict type with str - if "dict" in value["type"].lower(): - value["type"] = "code" + return _type - if key == "dict_": - value["type"] = "file" - value["suffixes"] = [".json", ".yaml", ".yml"] - value["fileTypes"] = ["json", "yaml", "yml"] - # Replace default value with actual value - if "default" in value: - value["value"] = value["default"] - value.pop("default") +def replace_mapping_with_dict(_type: str) -> str: + """ + Replaces 'Mapping' with 'dict' in the type string. 
- if key == "headers": - value[ - "value" - ] = """{'Authorization': - 'Bearer '}""" - # Add options to openai - if name == "OpenAI" and key == "model_name": - value["options"] = constants.OPENAI_MODELS - value["list"] = True - value["value"] = constants.OPENAI_MODELS[0] - elif name == "ChatOpenAI" and key == "model_name": - value["options"] = constants.CHAT_OPENAI_MODELS - value["list"] = True - value["value"] = constants.CHAT_OPENAI_MODELS[0] - elif (name == "Anthropic" or name == "ChatAnthropic") and key == "model_name": - value["options"] = constants.ANTHROPIC_MODELS - value["list"] = True - value["value"] = constants.ANTHROPIC_MODELS[0] - return d + Returns: + The modified type string. + """ + if "Mapping" in _type: + _type = _type.replace("Mapping", "dict") + return _type -def update_verbose(d: dict, new_value: bool) -> dict: + +def get_formatted_type(key: str, _type: str) -> str: """ - Recursively updates the value of the 'verbose' key in a dictionary. + Formats the type value based on the given key. - Args: - d: the dictionary to update - new_value: the new value to set + Returns: + The formatted type value. + """ + if key == "allowed_tools": + return "Tool" + + elif key == "max_value_length": + return "int" + + return _type + + +def should_show_field(value: Dict[str, Any], key: str) -> bool: + """ + Determines if the field should be shown or not. Returns: - The updated dictionary. + True if the field should be shown, False otherwise. """ + return ( + (value["required"] and key != "input_variables") + or key in FORCE_SHOW_FIELDS + or any(text in key.lower() for text in ["password", "token", "api", "key"]) + ) - for k, v in d.items(): - if isinstance(v, dict): - update_verbose(v, new_value) - elif k == "verbose": - d[k] = new_value - return d +def is_password_field(key: str) -> bool: + """ + Determines if the field is a password field. -def sync_to_async(func): + Returns: + True if the field is a password field, False otherwise. 
""" - Decorator to convert a sync function to an async function. + return any(text in key.lower() for text in ["password", "token", "api", "key"]) + + +def is_multiline_field(key: str) -> bool: """ + Determines if the field is a multiline field. - @wraps(func) - async def async_wrapper(*args, **kwargs): - return func(*args, **kwargs) + Returns: + True if the field is a multiline field, False otherwise. + """ + return key in { + "suffix", + "prefix", + "template", + "examples", + "code", + "headers", + "format_instructions", + } + + +def replace_dict_type_with_code(value: Dict[str, Any]) -> None: + """ + Replaces the type value with 'code' if the type is a dict. + """ + if "dict" in value["type"].lower(): + value["type"] = "code" - return async_wrapper + +def set_dict_file_attributes(value: Dict[str, Any]) -> None: + """ + Sets the file attributes for the 'dict_' key. + """ + value["type"] = "file" + value["suffixes"] = [".json", ".yaml", ".yml"] + value["fileTypes"] = ["json", "yaml", "yml"] + + +def replace_default_value_with_actual(value: Dict[str, Any]) -> None: + """ + Replaces the default value with the actual value. + """ + if "default" in value: + value["value"] = value["default"] + value.pop("default") + + +def set_headers_value(value: Dict[str, Any]) -> None: + """ + Sets the value for the 'headers' key. + """ + value["value"] = """{'Authorization': 'Bearer '}""" + + +def add_options_to_field( + value: Dict[str, Any], class_name: Optional[str], key: str +) -> None: + """ + Adds options to the field based on the class name and key. 
+ """ + options_map = { + "OpenAI": constants.OPENAI_MODELS, + "ChatOpenAI": constants.CHAT_OPENAI_MODELS, + "Anthropic": constants.ANTHROPIC_MODELS, + "ChatAnthropic": constants.ANTHROPIC_MODELS, + } + + if class_name in options_map and key == "model_name": + value["options"] = options_map[class_name] + value["list"] = True + value["value"] = options_map[class_name][0] + + +def get_number_of_workers(workers=None): + if workers == -1 or workers is None: + workers = (cpu_count() * 2) + 1 + logger.debug(f"Number of workers: {workers}") + return workers diff --git a/src/backend/langflow/utils/validate.py b/src/backend/langflow/utils/validate.py index 905b9dd44a..f8a9c1d1d7 100644 --- a/src/backend/langflow/utils/validate.py +++ b/src/backend/langflow/utils/validate.py @@ -163,9 +163,77 @@ def wrapped_function(*args, **kwargs): return wrapped_function +def create_class(code, class_name): + if not hasattr(ast, "TypeIgnore"): + + class TypeIgnore(ast.AST): + _fields = () + + ast.TypeIgnore = TypeIgnore + + module = ast.parse(code) + exec_globals = globals().copy() + + for node in module.body: + if isinstance(node, ast.Import): + for alias in node.names: + try: + exec_globals[alias.asname or alias.name] = importlib.import_module( + alias.name + ) + except ModuleNotFoundError as e: + raise ModuleNotFoundError( + f"Module {alias.name} not found. Please install it and try again." + ) from e + elif isinstance(node, ast.ImportFrom): + try: + imported_module = importlib.import_module(node.module) + for alias in node.names: + exec_globals[alias.name] = getattr(imported_module, alias.name) + except ModuleNotFoundError as e: + raise ModuleNotFoundError( + f"Module {node.module} not found. Please install it and try again." 
+ ) from e + + class_code = next( + node + for node in module.body + if isinstance(node, ast.ClassDef) and node.name == class_name + ) + class_code.parent = None + code_obj = compile( + ast.Module(body=[class_code], type_ignores=[]), "", "exec" + ) + # This suppresses import errors + # with contextlib.suppress(Exception): + exec(code_obj, exec_globals, locals()) + exec_globals[class_name] = locals()[class_name] + + # Return a function that imports necessary modules and creates an instance of the target class + def build_my_class(*args, **kwargs): + for module_name, module in exec_globals.items(): + if isinstance(module, type(importlib)): + globals()[module_name] = module + + instance = exec_globals[class_name](*args, **kwargs) + return instance + + build_my_class.__globals__.update(exec_globals) + + return build_my_class + + def extract_function_name(code): module = ast.parse(code) for node in module.body: if isinstance(node, ast.FunctionDef): return node.name raise ValueError("No function definition found in the code string") + + +def extract_class_name(code): + module = ast.parse(code) + for node in module.body: + if isinstance(node, ast.ClassDef): + return node.name + raise ValueError("No class definition found in the code string") diff --git a/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx b/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx index cfa9b8b92f..ad31fa8655 100644 --- a/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx +++ b/src/frontend/src/CustomNodes/GenericNode/components/parameterComponent/index.tsx @@ -13,7 +13,7 @@ import IntComponent from "../../../../components/intComponent"; import PromptAreaComponent from "../../../../components/promptComponent"; import TextAreaComponent from "../../../../components/textAreaComponent"; import ToggleShadComponent from "../../../../components/toggleShadComponent"; -import { MAX_LENGTH_TO_SCROLL_TOOLTIP } from 
"../../../../constants/constants"; +import { TOOLTIP_EMPTY } from "../../../../constants/constants"; import { TabsContext } from "../../../../contexts/tabsContext"; import { typesContext } from "../../../../contexts/typesContext"; import { ParameterComponentType } from "../../../../types/components"; @@ -23,11 +23,7 @@ import { nodeIconsLucide, nodeNames, } from "../../../../utils/styleUtils"; -import { - classNames, - getRandomKeyByssmm, - groupByFamily, -} from "../../../../utils/utils"; +import { classNames, groupByFamily } from "../../../../utils/utils"; export default function ParameterComponent({ left, @@ -45,11 +41,12 @@ export default function ParameterComponent({ }: ParameterComponentType) { const ref = useRef(null); const refHtml = useRef(null); - const refNumberComponents = useRef(0); const infoHtml = useRef(null); const updateNodeInternals = useUpdateNodeInternals(); const [position, setPosition] = useState(0); - const { setTabsState, tabId, save } = useContext(TabsContext); + const { setTabsState, tabId, save, flows } = useContext(TabsContext); + + const flow = flows.find((f) => f.id === tabId).data?.nodes ?? null; // Update component position useEffect(() => { @@ -80,9 +77,11 @@ export default function ParameterComponent({ [tabId]: { ...prev[tabId], isPending: true, + formKeysData: prev[tabId].formKeysData, }, }; }); + renderTooltips(); }; useEffect(() => { @@ -98,57 +97,65 @@ export default function ParameterComponent({ ); }, [info]); - useEffect(() => { - const groupedObj = groupByFamily(myData, tooltipTitle, left, data.type); - - refNumberComponents.current = groupedObj[0]?.type?.length; + function renderTooltips() { + let groupedObj = groupByFamily(myData, tooltipTitle, left, flow); - refHtml.current = groupedObj.map((item, i) => { - const Icon: any = nodeIconsLucide[item.family]; + if (groupedObj && groupedObj.length > 0) { + refHtml.current = groupedObj.map((item, i) => { + const Icon: any = + nodeIconsLucide[item.family] ?? 
nodeIconsLucide["unknown"]; - return ( - 0 ? "mt-2 flex items-center" : "flex items-center" - )} - > -
0 ? "mt-2 flex items-center" : "flex items-center" + )} > - -
- - {nodeNames[item.family] ?? ""}{" "} - - {" "} - {item.type === "" ? "" : " - "} - {item.type.split(", ").length > 2 - ? item.type.split(", ").map((el, i) => ( - - - {i === item.type.split(", ").length - 1 - ? el - : (el += `, `)} - - - )) - : item.type} + > + +
+ + {nodeNames[item.family] ?? "Unknown"} + + {" "} + {item.type === "" ? "" : " - "} + {item.type.split(", ").length > 2 + ? item.type.split(", ").map((el, i) => ( + + + {i === item.type.split(", ").length - 1 + ? el + : (el += `, `)} + + + )) + : item.type} + - - ); - }); - }, [tooltipTitle]); + ); + }); + } else { + refHtml.current = {TOOLTIP_EMPTY}; + } + } + + useEffect(() => { + renderTooltips(); + }, [tooltipTitle, flow]); + return (
) : ( MAX_LENGTH_TO_SCROLL_TOOLTIP - ? "tooltip-fixed-width custom-scroll overflow-y-scroll nowheel" - : "tooltip-fixed-width" - } + styleClasses={"tooltip-fixed-width custom-scroll nowheel"} delayDuration={0} content={refHtml.current} side={left ? "left" : "right"} @@ -252,7 +255,7 @@ export default function ParameterComponent({
{ handleOnNewValue(t); }} @@ -280,6 +283,7 @@ export default function ParameterComponent({ ) : left === true && type === "code" ? (
{ data.node = nodeClass; }} diff --git a/src/frontend/src/CustomNodes/GenericNode/index.tsx b/src/frontend/src/CustomNodes/GenericNode/index.tsx index 1aff91f0be..d72e46d23a 100644 --- a/src/frontend/src/CustomNodes/GenericNode/index.tsx +++ b/src/frontend/src/CustomNodes/GenericNode/index.tsx @@ -1,11 +1,10 @@ import { cloneDeep } from "lodash"; -import { useContext, useEffect, useRef, useState } from "react"; +import { useContext, useEffect, useState } from "react"; import { NodeToolbar, useUpdateNodeInternals } from "reactflow"; import ShadTooltip from "../../components/ShadTooltipComponent"; import Tooltip from "../../components/TooltipComponent"; import IconComponent from "../../components/genericIconComponent"; import { useSSE } from "../../contexts/SSEContext"; -import { alertContext } from "../../contexts/alertContext"; import { TabsContext } from "../../contexts/tabsContext"; import { typesContext } from "../../contexts/typesContext"; import NodeToolbarComponent from "../../pages/FlowPage/components/nodeToolbarComponent"; @@ -23,14 +22,9 @@ export default function GenericNode({ selected: boolean; }) { const [data, setData] = useState(olddata); - const { setErrorData } = useContext(alertContext); const { updateFlow, flows, tabId } = useContext(TabsContext); const updateNodeInternals = useUpdateNodeInternals(); - const showError = useRef(true); const { types, deleteNode, reactFlowInstance } = useContext(typesContext); - // any to avoid type conflict - const Icon: any = - nodeIconsLucide[data.type] || nodeIconsLucide[types[data.type]]; const name = nodeIconsLucide[data.type] ? data.type : types[data.type]; const [validationStatus, setValidationStatus] = useState(null); // State for outline color @@ -67,18 +61,6 @@ export default function GenericNode({ } }, [sseData, data.id]); - if (!Icon) { - if (showError.current) { - setErrorData({ - title: data.type - ? 
`The ${data.type} node could not be rendered, please review your json file` - : "There was a node that can't be rendered, please review your json file", - }); - showError.current = false; - } - deleteNode(data.id); - return; - } return ( <> @@ -95,6 +77,11 @@ export default function GenericNode({ "generic-node-div" )} > + {data.node.beta && ( +
+
BETA
+
+ )}
) : (
- {validationStatus.params + {typeof validationStatus.params === "string" ? validationStatus.params .split("\n") .map((line, index) =>
{line}
) @@ -178,6 +165,14 @@ export default function GenericNode({ {data.node.template[t].show && !data.node.template[t].advanced ? ( ; tabId: string; + invalidName: boolean; + setInvalidName: (invalidName: boolean) => void; setName: (name: string) => void; setDescription: (description: string) => void; updateFlow: (flow: { id: string; name: string }) => void; @@ -16,6 +19,8 @@ type InputProps = { export const EditFlowSettings: React.FC = ({ name, + invalidName, + setInvalidName, description, maxLength = 50, flows, @@ -25,6 +30,14 @@ export const EditFlowSettings: React.FC = ({ updateFlow, }) => { const [isMaxLength, setIsMaxLength] = useState(false); + const nameLists = useRef([]); + useEffect(() => { + readFlowsFromDatabase().then((flows) => { + flows.forEach((flow) => { + nameLists.current.push(flow.name); + }); + }); + }, []); const handleNameChange = (event: ChangeEvent) => { const { value } = event.target; @@ -33,7 +46,11 @@ export const EditFlowSettings: React.FC = ({ } else { setIsMaxLength(false); } - + if (!nameLists.current.includes(value)) { + setInvalidName(false); + } else { + setInvalidName(true); + } setName(value); }; @@ -55,6 +72,9 @@ export const EditFlowSettings: React.FC = ({ {isMaxLength && ( Character limit reached )} + {invalidName && ( + Name already in use + )}
{ const handleKeyDown = (event: KeyboardEvent) => { diff --git a/src/frontend/src/components/codeAreaComponent/index.tsx b/src/frontend/src/components/codeAreaComponent/index.tsx index e95b03bcf5..05e8127da0 100644 --- a/src/frontend/src/components/codeAreaComponent/index.tsx +++ b/src/frontend/src/components/codeAreaComponent/index.tsx @@ -1,6 +1,6 @@ import { useEffect, useState } from "react"; import CodeAreaModal from "../../modals/codeAreaModal"; -import { TextAreaComponentType } from "../../types/components"; +import { CodeAreaComponentType } from "../../types/components"; import IconComponent from "../genericIconComponent"; @@ -10,8 +10,9 @@ export default function CodeAreaComponent({ disabled, editNode = false, nodeClass, + dynamic, setNodeClass, -}: TextAreaComponentType) { +}: CodeAreaComponentType) { const [myValue, setMyValue] = useState( typeof value == "string" ? value : JSON.stringify(value) ); @@ -29,6 +30,7 @@ export default function CodeAreaComponent({ return (
{myValue !== "" ? myValue : "Type something..."} diff --git a/src/frontend/src/components/codeTabsComponent/index.tsx b/src/frontend/src/components/codeTabsComponent/index.tsx index 1357e0c790..9ccdb38e44 100644 --- a/src/frontend/src/components/codeTabsComponent/index.tsx +++ b/src/frontend/src/components/codeTabsComponent/index.tsx @@ -145,7 +145,7 @@ export default function CodeTabsComponent({ ))} - {Number(activeTab) < 3 && ( + {Number(activeTab) < 4 && (
diff --git a/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx b/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx index 3111f74a22..5413d6ec92 100644 --- a/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx +++ b/src/frontend/src/pages/FlowPage/components/extraSidebarComponent/index.tsx @@ -1,4 +1,4 @@ -import { useContext, useState } from "react"; +import { useContext, useEffect, useState } from "react"; import ShadTooltip from "../../../../components/ShadTooltipComponent"; import IconComponent from "../../../../components/genericIconComponent"; import { Input } from "../../../../components/ui/input"; @@ -18,8 +18,8 @@ import { classNames } from "../../../../utils/utils"; import DisclosureComponent from "../DisclosureComponent"; export default function ExtraSidebar() { - const { data } = useContext(typesContext); - const { flows, tabId, uploadFlow, tabsState, saveFlow } = + const { data, templates } = useContext(typesContext); + const { flows, tabId, uploadFlow, tabsState, saveFlow, isBuilt } = useContext(TabsContext); const { setSuccessData, setErrorData } = useContext(alertContext); const [dataFilter, setFilterData] = useState(data); @@ -57,56 +57,83 @@ export default function ExtraSidebar() { }); } const flow = flows.find((f) => f.id === tabId); + useEffect(() => { + // show components with error on load + let errors = []; + Object.keys(templates).forEach((component) => { + if (templates[component].error) { + errors.push(component); + } + }); + if (errors.length > 0) + setErrorData({ title: " Components with errors: ", list: errors }); + }, []); return (
- - - - - +
+ + + +
+
-
- -
-
- - - {flow && flow.data && ( - +
- +
-
- )} +
+ +
+ +
+ {flow && flow.data && ( + +
+ +
+
+ )} +
- - - - + onClick={(event) => { + saveFlow(flow); + setSuccessData({ title: "Changes saved successfully" }); + }} + > + + +
+
@@ -138,7 +165,7 @@ export default function ExtraSidebar() { Object.keys(dataFilter[d]).length > 0 ? (