diff --git a/.github/workflows/autoSyncMergedPullRequest.yml b/.github/workflows/autoSyncMergedPullRequest.yml deleted file mode 100644 index 0868c42e70f..00000000000 --- a/.github/workflows/autoSyncMergedPullRequest.yml +++ /dev/null @@ -1,33 +0,0 @@ -name: autoSyncMergedPullRequest -on: - pull_request_target: - types: - - closed - branches: ['main'] -jobs: - autoSyncMergedPullRequest: - if: github.event.pull_request.merged == true - runs-on: ubuntu-latest - permissions: - contents: write - steps: - - uses: actions/checkout@v4 - - name: Show PR info - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - echo The PR #${{ github.event.pull_request.number }} was merged on main branch! - - name: Repository Dispatch - uses: peter-evans/repository-dispatch@v3 - with: - token: ${{ secrets.AUTOSYNC_TOKEN }} - repository: ${{ secrets.AUTOSYNC_CH_URL }} - event-type: ${{ secrets.AUTOSYNC_PR_EVENT_TYPE }} - client-payload: >- - { - "ref": "${{ github.ref }}", - "prNumber": "${{ github.event.pull_request.number }}", - "prTitle": "${{ github.event.pull_request.title }}", - "prDescription": "", - "sha": "${{ github.sha }}" - } diff --git a/.github/workflows/autoSyncSingleCommit.yml b/.github/workflows/autoSyncSingleCommit.yml deleted file mode 100644 index 6a661c94681..00000000000 --- a/.github/workflows/autoSyncSingleCommit.yml +++ /dev/null @@ -1,36 +0,0 @@ -name: autoSyncSingleCommit -on: - push: - branches: - - main -jobs: - doNotAutoSyncSingleCommit: - if: github.event.commits[1] != null - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - name: IGNORE autoSyncSingleCommit - run: | - echo This single commit has came from a merged commit. We will ignore it. This case is handled in autoSyncMergedPullRequest workflow for merge commits comming from merged pull requests only! Beware, the regular merge commits are not handled by any workflow for the moment. - autoSyncSingleCommit: - if: github.event.commits[1] == null - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - name: autoSyncSingleCommit - env: - GITHUB_CONTEXT: ${{ toJSON(github) }} - run: | - echo Autosync a single commit with id: ${{ github.sha }} from openSource main branch towards cloud hosted version. - - name: Repository Dispatch - uses: peter-evans/repository-dispatch@v3 - with: - token: ${{ secrets.AUTOSYNC_TOKEN }} - repository: ${{ secrets.AUTOSYNC_CH_URL }} - event-type: ${{ secrets.AUTOSYNC_SC_EVENT_TYPE }} - client-payload: >- - { - "ref": "${{ github.ref }}", - "sha": "${{ github.sha }}", - "commitMessage": "${{ github.event.commits[0].id }}" - } diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index 5c58608e19c..741fdc91c56 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -3,6 +3,22 @@ name: Docker Image CI on: workflow_dispatch: inputs: + registry: + description: 'Container Registry to push the image to.' + type: choice + required: true + default: 'aws_ecr' + options: + - 'docker_hub' + - 'aws_ecr' + environment: + description: 'Environment to push the image to.' + required: true + default: 'dev' + type: choice + options: + - dev + - prod node_version: description: 'Node.js version to build this image with.' 
type: choice @@ -19,25 +35,57 @@ on: jobs: docker: runs-on: ubuntu-latest + environment: ${{ github.event.inputs.environment }} steps: + - name: Set default values + id: defaults + run: | + echo "registry=${{ github.event.inputs.registry || 'aws_ecr' }}" >> $GITHUB_OUTPUT + echo "node_version=${{ github.event.inputs.node_version || '20' }}" >> $GITHUB_OUTPUT + echo "tag_version=${{ github.event.inputs.tag_version || 'latest' }}" >> $GITHUB_OUTPUT + - name: Checkout uses: actions/checkout@v4.1.1 + - name: Set up QEMU uses: docker/setup-qemu-action@v3.0.0 + - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3.0.0 + + # ------------------------ + # Login Steps (conditional) + # ------------------------ - name: Login to Docker Hub + if: steps.defaults.outputs.registry == 'docker_hub' uses: docker/login-action@v3 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} + + - name: Configure AWS Credentials + if: steps.defaults.outputs.registry == 'aws_ecr' + uses: aws-actions/configure-aws-credentials@v3 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws-region: ${{ secrets.AWS_REGION }} + + - name: Login to Amazon ECR + if: steps.defaults.outputs.registry == 'aws_ecr' + uses: aws-actions/amazon-ecr-login@v1 + + # ------------------------- + # Build and push (conditional tags) + # ------------------------- - name: Build and push uses: docker/build-push-action@v5.3.0 with: context: . - file: ./docker/Dockerfile + file: Dockerfile build-args: | - NODE_VERSION=${{github.event.inputs.node_version}} + NODE_VERSION=${{ steps.defaults.outputs.node_version }} platforms: linux/amd64,linux/arm64 push: true - tags: flowiseai/flowise:${{github.event.inputs.tag_version}} + tags: | + ${{ steps.defaults.outputs.registry == 'docker_hub' && format('flowiseai/flowise:{0}', steps.defaults.outputs.tag_version) || format('{0}.dkr.ecr.{1}.amazonaws.com/flowise:{2}', secrets.AWS_ACCOUNT_ID, secrets.AWS_REGION, steps.defaults.outputs.tag_version) }} diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index f8d6fa6f927..1b7139d39ac 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -6,6 +6,7 @@ on: pull_request: branches: - '*' + workflow_dispatch: permissions: contents: read jobs: @@ -31,6 +32,8 @@ jobs: - run: pnpm install - run: pnpm lint - run: pnpm build + env: + NODE_OPTIONS: '--max_old_space_size=4096' - name: Cypress install run: pnpm cypress install - name: Install dependencies (Cypress Action) diff --git a/.github/workflows/test_docker_build.yml b/.github/workflows/test_docker_build.yml index a27cf22dd76..28b970cf850 100644 --- a/.github/workflows/test_docker_build.yml +++ b/.github/workflows/test_docker_build.yml @@ -8,13 +8,12 @@ on: pull_request: branches: - '*' - + workflow_dispatch: jobs: build: runs-on: ubuntu-latest env: PUPPETEER_SKIP_DOWNLOAD: true steps: - - uses: actions/checkout@v3 - + - uses: actions/checkout@v4 - run: docker build --no-cache -t flowise . diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 60735ef187d..bda3b5e119b 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -125,15 +125,11 @@ Flowise support different environment variables to configure your instance. 
You | PORT | The HTTP port Flowise runs on | Number | 3000 | | CORS_ORIGINS | The allowed origins for all cross-origin HTTP calls | String | | | IFRAME_ORIGINS | The allowed origins for iframe src embedding | String | | -| FLOWISE_USERNAME | Username to login | String | | -| FLOWISE_PASSWORD | Password to login | String | | | FLOWISE_FILE_SIZE_LIMIT | Upload File Size Limit | String | 50mb | | DEBUG | Print logs from components | Boolean | | | LOG_PATH | Location where log files are stored | String | `your-path/Flowise/logs` | | LOG_LEVEL | Different levels of logs | Enum String: `error`, `info`, `verbose`, `debug` | `info` | | LOG_JSON_SPACES | Spaces to beautify JSON logs | | 2 | -| APIKEY_STORAGE_TYPE | To store api keys on a JSON file or database. Default is `json` | Enum String: `json`, `db` | `json` | -| APIKEY_PATH | Location where api keys are saved when `APIKEY_STORAGE_TYPE` is `json` | String | `your-path/Flowise/packages/server` | | TOOL_FUNCTION_BUILTIN_DEP | NodeJS built-in modules to be used for Tool Function | String | | | TOOL_FUNCTION_EXTERNAL_DEP | External modules to be used for Tool Function | String | | | DATABASE_TYPE | Type of database to store the flowise data | Enum String: `sqlite`, `mysql`, `postgres` | `sqlite` | diff --git a/LICENSE.md b/LICENSE.md index 80800001864..68314426eaf 100644 --- a/LICENSE.md +++ b/LICENSE.md @@ -1,6 +1,14 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ +Copyright (c) 2023-present FlowiseAI, Inc. + +Portions of this software are licensed as follows: + +- All content that resides under https://github.com/FlowiseAI/Flowise/tree/main/packages/server/src/enterprise directory and files with explicit copyright notice such as [IdentityManager.ts](https://github.com/FlowiseAI/Flowise/tree/main/packages/server/src/IdentityManager.ts) are licensed under [Commercial License](https://github.com/FlowiseAI/Flowise/tree/main/packages/server/src/enterprise/LICENSE.md). +- All third party components incorporated into the FlowiseAI Software are licensed under the original license provided by the owner of the applicable component. +- Content outside of the above mentioned directories or restrictions above is available under the "Apache 2.0" license as defined below. + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION diff --git a/README.md b/README.md index d1c9b2da405..cd994bdc6c4 100644 --- a/README.md +++ b/README.md @@ -31,12 +31,6 @@ Download and Install [NodeJS](https://nodejs.org/en/download) >= 18.15.0 npx flowise start ``` - With username & password - - ```bash - npx flowise start --FLOWISE_USERNAME=user --FLOWISE_PASSWORD=1234 - ``` - 3. Open [http://localhost:3000](http://localhost:3000) ## 🐳 Docker @@ -138,15 +132,6 @@ Flowise has 3 different modules in a single mono repository. Any code changes will reload the app automatically on [http://localhost:8080](http://localhost:8080) -## 🔒 Authentication - -To enable app level authentication, add `FLOWISE_USERNAME` and `FLOWISE_PASSWORD` to the `.env` file in `packages/server`: - -``` -FLOWISE_USERNAME=user -FLOWISE_PASSWORD=1234 -``` - ## 🌱 Env Variables Flowise support different environment variables to configure your instance. You can specify the following variables in the `.env` file inside `packages/server` folder. 
Read [more](https://github.com/FlowiseAI/Flowise/blob/main/CONTRIBUTING.md#-env-variables) diff --git a/docker/.env.example b/docker/.env.example index 56ac56a809e..ecac73bf1c4 100644 --- a/docker/.env.example +++ b/docker/.env.example @@ -1,16 +1,12 @@ PORT=3000 -DATABASE_PATH=/root/.flowise -APIKEY_PATH=/root/.flowise -SECRETKEY_PATH=/root/.flowise -LOG_PATH=/root/.flowise/logs -BLOB_STORAGE_PATH=/root/.flowise/storage -# APIKEY_STORAGE_TYPE=json (json | db) +# APIKEY_PATH=/your_apikey_path/.flowise # (will be deprecated by end of 2025) -# NUMBER_OF_PROXIES= 1 -# CORS_ORIGINS=* -# IFRAME_ORIGINS=* +############################################################################################################ +############################################## DATABASE #################################################### +############################################################################################################ +DATABASE_PATH=/root/.flowise # DATABASE_TYPE=postgres # DATABASE_PORT=5432 # DATABASE_HOST="" @@ -20,34 +16,37 @@ BLOB_STORAGE_PATH=/root/.flowise/storage # DATABASE_SSL=true # DATABASE_SSL_KEY_BASE64= + +############################################################################################################ +############################################## SECRET KEYS ################################################# +############################################################################################################ + # SECRETKEY_STORAGE_TYPE=local #(local | aws) -# SECRETKEY_PATH=/your_api_key_path/.flowise -# FLOWISE_SECRETKEY_OVERWRITE=myencryptionkey +SECRETKEY_PATH=/root/.flowise +# FLOWISE_SECRETKEY_OVERWRITE=myencryptionkey # (if you want to overwrite the secret key) # SECRETKEY_AWS_ACCESS_KEY= # SECRETKEY_AWS_SECRET_KEY= # SECRETKEY_AWS_REGION=us-west-2 +# SECRETKEY_AWS_NAME=FlowiseEncryptionKey -# FLOWISE_USERNAME=user -# FLOWISE_PASSWORD=1234 -# FLOWISE_SECRETKEY_OVERWRITE=myencryptionkey -# FLOWISE_FILE_SIZE_LIMIT=50mb + +############################################################################################################ +############################################## LOGGING ##################################################### +############################################################################################################ # DEBUG=true -# LOG_LEVEL=info (error | warn | info | verbose | debug) +LOG_PATH=/root/.flowise/logs +# LOG_LEVEL=info #(error | warn | info | verbose | debug) # TOOL_FUNCTION_BUILTIN_DEP=crypto,fs # TOOL_FUNCTION_EXTERNAL_DEP=moment,lodash -# LANGCHAIN_TRACING_V2=true -# LANGCHAIN_ENDPOINT=https://api.smith.langchain.com -# LANGCHAIN_API_KEY=your_api_key -# LANGCHAIN_PROJECT=your_project -# Uncomment the following line to enable model list config, load the list of models from your local config file -# see https://raw.githubusercontent.com/FlowiseAI/Flowise/main/packages/components/models.json for the format -# MODEL_LIST_CONFIG_JSON=/your_model_list_config_file_path +############################################################################################################ +############################################## STORAGE ##################################################### +############################################################################################################ # STORAGE_TYPE=local (local | s3 | gcs) -# BLOB_STORAGE_PATH=/your_storage_path/.flowise/storage +BLOB_STORAGE_PATH=/root/.flowise/storage # S3_STORAGE_BUCKET_NAME=flowise # S3_STORAGE_ACCESS_KEY_ID= # 
S3_STORAGE_SECRET_ACCESS_KEY= @@ -59,12 +58,69 @@ BLOB_STORAGE_PATH=/root/.flowise/storage # GOOGLE_CLOUD_STORAGE_BUCKET_NAME= # GOOGLE_CLOUD_UNIFORM_BUCKET_ACCESS=true + +############################################################################################################ +############################################## SETTINGS #################################################### +############################################################################################################ + +# NUMBER_OF_PROXIES= 1 +# CORS_ORIGINS=* +# IFRAME_ORIGINS=* +# FLOWISE_FILE_SIZE_LIMIT=50mb # SHOW_COMMUNITY_NODES=true +# DISABLE_FLOWISE_TELEMETRY=true # DISABLED_NODES=bufferMemory,chatOpenAI (comma separated list of node names to disable) +# Uncomment the following line to enable model list config, load the list of models from your local config file +# see https://raw.githubusercontent.com/FlowiseAI/Flowise/main/packages/components/models.json for the format +# MODEL_LIST_CONFIG_JSON=/your_model_list_config_file_path + + +############################################################################################################ +############################################ AUTH PARAMETERS ############################################### +############################################################################################################ + +# APP_URL=http://localhost:3000 + +# SMTP_HOST=smtp.host.com +# SMTP_PORT=465 +# SMTP_USER=smtp_user +# SMTP_PASSWORD=smtp_password +# SMTP_SECURE=true +# ALLOW_UNAUTHORIZED_CERTS=false +# SENDER_EMAIL=team@example.com + +# JWT_AUTH_TOKEN_SECRET='AABBCCDDAABBCCDDAABBCCDDAABBCCDDAABBCCDD' +# JWT_REFRESH_TOKEN_SECRET='AABBCCDDAABBCCDDAABBCCDDAABBCCDDAABBCCDD' +# JWT_ISSUER='ISSUER' +# JWT_AUDIENCE='AUDIENCE' +# JWT_TOKEN_EXPIRY_IN_MINUTES=360 +# JWT_REFRESH_TOKEN_EXPIRY_IN_MINUTES=43200 +# EXPIRE_AUTH_TOKENS_ON_RESTART=true # (if you need to expire all tokens on app restart) +# EXPRESS_SESSION_SECRET=flowise + +# INVITE_TOKEN_EXPIRY_IN_HOURS=24 +# PASSWORD_RESET_TOKEN_EXPIRY_IN_MINS=15 +# PASSWORD_SALT_HASH_ROUNDS=10 +# TOKEN_HASH_SECRET='popcorn' + +# WORKSPACE_INVITE_TEMPLATE_PATH=/path/to/custom/workspace_invite.hbs + + +############################################################################################################ +############################################# ENTERPRISE ################################################### +############################################################################################################ + +# LICENSE_URL= +# FLOWISE_EE_LICENSE_KEY= +# OFFLINE= + + +############################################################################################################ +########################################### METRICS COLLECTION ############################################# +############################################################################################################ + +# POSTHOG_PUBLIC_API_KEY=your_posthog_public_api_key -###################### -# METRICS COLLECTION -####################### # ENABLE_METRICS=false # METRICS_PROVIDER=prometheus # prometheus | open_telemetry # METRICS_INCLUDE_NODE_METRICS=true # default is true @@ -75,15 +131,21 @@ BLOB_STORAGE_PATH=/root/.flowise/storage # METRICS_OPEN_TELEMETRY_PROTOCOL=http # http | grpc | proto (default is http) # METRICS_OPEN_TELEMETRY_DEBUG=true # default is false -# Uncomment the following lines to enable global agent proxy -# see https://www.npmjs.com/package/global-agent for more details + 
+############################################################################################################ +############################################### PROXY ###################################################### +############################################################################################################ + +# Uncomment the following lines to enable global agent proxy, see https://www.npmjs.com/package/global-agent for more details # GLOBAL_AGENT_HTTP_PROXY=CorporateHttpProxyUrl # GLOBAL_AGENT_HTTPS_PROXY=CorporateHttpsProxyUrl # GLOBAL_AGENT_NO_PROXY=ExceptionHostsToBypassProxyIfNeeded -###################### -# QUEUE CONFIGURATION -####################### + +############################################################################################################ +########################################### QUEUE CONFIGURATION ############################################ +############################################################################################################ + # MODE=queue #(queue | main) # QUEUE_NAME=flowise-queue # QUEUE_REDIS_EVENT_STREAM_MAX_LEN=100000 diff --git a/docker/README.md b/docker/README.md index 35d03142d04..d4dc5ae9a90 100644 --- a/docker/README.md +++ b/docker/README.md @@ -9,26 +9,11 @@ Starts Flowise from [DockerHub Image](https://hub.docker.com/r/flowiseai/flowise 3. Open [http://localhost:3000](http://localhost:3000) 4. You can bring the containers down by `docker compose stop` -## 🔒 Authentication - -1. Create `.env` file and specify the `PORT`, `FLOWISE_USERNAME`, and `FLOWISE_PASSWORD` (refer to `.env.example`) -2. Pass `FLOWISE_USERNAME` and `FLOWISE_PASSWORD` to the `docker-compose.yml` file: - ``` - environment: - - PORT=${PORT} - - FLOWISE_USERNAME=${FLOWISE_USERNAME} - - FLOWISE_PASSWORD=${FLOWISE_PASSWORD} - ``` -3. `docker compose up -d` -4. Open [http://localhost:3000](http://localhost:3000) -5. 
You can bring the containers down by `docker compose stop` - ## 🌱 Env Variables If you like to persist your data (flows, logs, apikeys, credentials), set these variables in the `.env` file inside `docker` folder: - DATABASE_PATH=/root/.flowise -- APIKEY_PATH=/root/.flowise - LOG_PATH=/root/.flowise/logs - SECRETKEY_PATH=/root/.flowise - BLOB_STORAGE_PATH=/root/.flowise/storage diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index 3e558486378..accc367b9b9 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -8,8 +8,6 @@ services: - PORT=${PORT} - CORS_ORIGINS=${CORS_ORIGINS} - IFRAME_ORIGINS=${IFRAME_ORIGINS} - - FLOWISE_USERNAME=${FLOWISE_USERNAME} - - FLOWISE_PASSWORD=${FLOWISE_PASSWORD} - FLOWISE_FILE_SIZE_LIMIT=${FLOWISE_FILE_SIZE_LIMIT} - DEBUG=${DEBUG} - DATABASE_PATH=${DATABASE_PATH} @@ -21,10 +19,13 @@ services: - DATABASE_PASSWORD=${DATABASE_PASSWORD} - DATABASE_SSL=${DATABASE_SSL} - DATABASE_SSL_KEY_BASE64=${DATABASE_SSL_KEY_BASE64} - - APIKEY_STORAGE_TYPE=${APIKEY_STORAGE_TYPE} - - APIKEY_PATH=${APIKEY_PATH} - SECRETKEY_PATH=${SECRETKEY_PATH} - FLOWISE_SECRETKEY_OVERWRITE=${FLOWISE_SECRETKEY_OVERWRITE} + - SECRETKEY_STORAGE_TYPE=${SECRETKEY_STORAGE_TYPE} + - SECRETKEY_AWS_ACCESS_KEY=${SECRETKEY_AWS_ACCESS_KEY} + - SECRETKEY_AWS_SECRET_KEY=${SECRETKEY_AWS_SECRET_KEY} + - SECRETKEY_AWS_REGION=${SECRETKEY_AWS_REGION} + - SECRETKEY_AWS_NAME=${SECRETKEY_AWS_NAME} - LOG_LEVEL=${LOG_LEVEL} - LOG_PATH=${LOG_PATH} - BLOB_STORAGE_PATH=${BLOB_STORAGE_PATH} diff --git a/docker/worker/docker-compose.yml b/docker/worker/docker-compose.yml index 193d9cd0df9..6140b84b5b1 100644 --- a/docker/worker/docker-compose.yml +++ b/docker/worker/docker-compose.yml @@ -8,8 +8,6 @@ services: - PORT=${PORT} - CORS_ORIGINS=${CORS_ORIGINS} - IFRAME_ORIGINS=${IFRAME_ORIGINS} - - FLOWISE_USERNAME=${FLOWISE_USERNAME} - - FLOWISE_PASSWORD=${FLOWISE_PASSWORD} - FLOWISE_FILE_SIZE_LIMIT=${FLOWISE_FILE_SIZE_LIMIT} - DEBUG=${DEBUG} - DATABASE_PATH=${DATABASE_PATH} @@ -21,8 +19,6 @@ services: - DATABASE_PASSWORD=${DATABASE_PASSWORD} - DATABASE_SSL=${DATABASE_SSL} - DATABASE_SSL_KEY_BASE64=${DATABASE_SSL_KEY_BASE64} - - APIKEY_STORAGE_TYPE=${APIKEY_STORAGE_TYPE} - - APIKEY_PATH=${APIKEY_PATH} - SECRETKEY_PATH=${SECRETKEY_PATH} - FLOWISE_SECRETKEY_OVERWRITE=${FLOWISE_SECRETKEY_OVERWRITE} - LOG_LEVEL=${LOG_LEVEL} diff --git a/i18n/CONTRIBUTING-ZH.md b/i18n/CONTRIBUTING-ZH.md index 45626785eee..412a0da186e 100644 --- a/i18n/CONTRIBUTING-ZH.md +++ b/i18n/CONTRIBUTING-ZH.md @@ -119,16 +119,12 @@ Flowise 在一个单一的单体存储库中有 3 个不同的模块。 Flowise 支持不同的环境变量来配置您的实例。您可以在 `packages/server` 文件夹中的 `.env` 文件中指定以下变量。阅读[更多信息](https://docs.flowiseai.com/environment-variables) | 变量名 | 描述 | 类型 | 默认值 | -| ---------------------------- | ------------------------------------------------------- | ----------------------------------------------- | ----------------------------------- | --- | -| PORT | Flowise 运行的 HTTP 端口 | 数字 | 3000 | -| FLOWISE_USERNAME | 登录用户名 | 字符串 | | -| FLOWISE_PASSWORD | 登录密码 | 字符串 | | +| ---------------------------- | ------------------------------------------------------- | ----------------------------------------------- | ----------------------------------- | --- | --- | +| PORT | Flowise 运行的 HTTP 端口 | 数字 | 3000 | | | | FLOWISE_FILE_SIZE_LIMIT | 上传文件大小限制 | 字符串 | 50mb | | | DEBUG | 打印组件的日志 | 布尔值 | | | LOG_PATH | 存储日志文件的位置 | 字符串 | `your-path/Flowise/logs` | | LOG_LEVEL | 日志的不同级别 | 枚举字符串: `error`, `info`, `verbose`, `debug` | `info` | -| APIKEY_STORAGE_TYPE | 存储 API 密钥的存储类型 | 枚举字符串: `json`, `db` 
| `json` | -| APIKEY_PATH | 存储 API 密钥的位置, 当`APIKEY_STORAGE_TYPE`是`json` | 字符串 | `your-path/Flowise/packages/server` | | TOOL_FUNCTION_BUILTIN_DEP | 用于工具函数的 NodeJS 内置模块 | 字符串 | | | TOOL_FUNCTION_EXTERNAL_DEP | 用于工具函数的外部模块 | 字符串 | | | DATABASE_TYPE | 存储 flowise 数据的数据库类型 | 枚举字符串: `sqlite`, `mysql`, `postgres` | `sqlite` | diff --git a/i18n/README-JA.md b/i18n/README-JA.md index a329059ed18..0ea1ae38698 100644 --- a/i18n/README-JA.md +++ b/i18n/README-JA.md @@ -31,12 +31,6 @@ npx flowise start ``` - ユーザー名とパスワードを入力 - - ```bash - npx flowise start --FLOWISE_USERNAME=user --FLOWISE_PASSWORD=1234 - ``` - 3. [http://localhost:3000](http://localhost:3000) を開く ## 🐳 Docker @@ -127,15 +121,6 @@ Flowise には、3 つの異なるモジュールが 1 つの mono リポジト コードの変更は [http://localhost:8080](http://localhost:8080) に自動的にアプリをリロードします -## 🔒 認証 - -アプリレベルの認証を有効にするには、 `FLOWISE_USERNAME` と `FLOWISE_PASSWORD` を `packages/server` の `.env` ファイルに追加します: - -``` -FLOWISE_USERNAME=user -FLOWISE_PASSWORD=1234 -``` - ## 🌱 環境変数 Flowise は、インスタンスを設定するためのさまざまな環境変数をサポートしています。`packages/server` フォルダ内の `.env` ファイルで以下の変数を指定することができる。[続き](https://github.com/FlowiseAI/Flowise/blob/main/CONTRIBUTING.md#-env-variables)を読む @@ -197,9 +182,9 @@ Flowise は、インスタンスを設定するためのさまざまな環境変 -[コントリビューティングガイド](CONTRIBUTING.md)を参照してください。質問や問題があれば、[Discord](https://discord.gg/jbaHfsRVBW) までご連絡ください。 +[コントリビューティングガイド](../CONTRIBUTING.md)を参照してください。質問や問題があれば、[Discord](https://discord.gg/jbaHfsRVBW) までご連絡ください。 [![Star History Chart](https://api.star-history.com/svg?repos=FlowiseAI/Flowise&type=Timeline)](https://star-history.com/#FlowiseAI/Flowise&Date) ## 📄 ライセンス -このリポジトリのソースコードは、[Apache License Version 2.0](LICENSE.md)の下で利用可能です。 +このリポジトリのソースコードは、[Apache License Version 2.0](../LICENSE.md)の下で利用可能です。 diff --git a/i18n/README-KR.md b/i18n/README-KR.md index c02b0b06638..7caaa01a4ae 100644 --- a/i18n/README-KR.md +++ b/i18n/README-KR.md @@ -31,12 +31,6 @@ npx flowise start ``` - 사용자 이름과 비밀번호로 시작하기 - - ```bash - npx flowise start --FLOWISE_USERNAME=user --FLOWISE_PASSWORD=1234 - ``` - 3. [http://localhost:3000](http://localhost:3000) URL 열기 ## 🐳 도커(Docker)를 활용하여 시작하기 @@ -127,15 +121,6 @@ Flowise는 단일 리포지토리에 3개의 서로 다른 모듈이 있습니 코드가 변경되면 [http://localhost:8080](http://localhost:8080)에서 자동으로 애플리케이션을 새로고침 합니다. -## 🔒 인증 - -애플리케이션 수준의 인증을 사용하려면 `packages/server`의 `.env` 파일에 `FLOWISE_USERNAME` 및 `FLOWISE_PASSWORD`를 추가합니다: - -``` -FLOWISE_USERNAME=user -FLOWISE_PASSWORD=1234 -``` - ## 🌱 환경 변수 Flowise는 인스턴스 구성을 위한 다양한 환경 변수를 지원합니다. `packages/server` 폴더 내 `.env` 파일에 다양한 환경 변수를 지정할 수 있습니다. [자세히 보기](https://github.com/FlowiseAI/Flowise/blob/main/CONTRIBUTING.md#-env-variables) @@ -197,9 +182,9 @@ Flowise는 인스턴스 구성을 위한 다양한 환경 변수를 지원합니 -[contributing guide](CONTRIBUTING.md)를 살펴보세요. 디스코드 [Discord](https://discord.gg/jbaHfsRVBW) 채널에서도 이슈나 질의응답을 진행하실 수 있습니다. +[contributing guide](../CONTRIBUTING.md)를 살펴보세요. 디스코드 [Discord](https://discord.gg/jbaHfsRVBW) 채널에서도 이슈나 질의응답을 진행하실 수 있습니다. [![Star History Chart](https://api.star-history.com/svg?repos=FlowiseAI/Flowise&type=Timeline)](https://star-history.com/#FlowiseAI/Flowise&Date) ## 📄 라이센스 -본 리포지토리의 소스코드는 [Apache License Version 2.0](LICENSE.md) 라이센스가 적용됩니다. +본 리포지토리의 소스코드는 [Apache License Version 2.0](../LICENSE.md) 라이센스가 적용됩니다. diff --git a/i18n/README-TW.md b/i18n/README-TW.md index f051e844e6d..cdf33443370 100644 --- a/i18n/README-TW.md +++ b/i18n/README-TW.md @@ -31,12 +31,6 @@ npx flowise start ``` - 使用用戶名和密碼 - - ```bash - npx flowise start --FLOWISE_USERNAME=user --FLOWISE_PASSWORD=1234 - ``` - 3. 
打開 [http://localhost:3000](http://localhost:3000) ## 🐳 Docker @@ -138,15 +132,6 @@ Flowise 在單個 mono 存儲庫中有 3 個不同的模塊。 任何代碼更改都會自動重新加載應用程序 [http://localhost:8080](http://localhost:8080) -## 🔒 認證 - -要啟用應用級別的身份驗證,請在 `packages/server` 中的 `.env` 文件中添加 `FLOWISE_USERNAME` 和 `FLOWISE_PASSWORD`: - -``` -FLOWISE_USERNAME=user -FLOWISE_PASSWORD=1234 -``` - ## 🌱 環境變量 Flowise 支持不同的環境變量來配置您的實例。您可以在 `packages/server` 文件夾中的 `.env` 文件中指定以下變量。閱讀 [更多](https://github.com/FlowiseAI/Flowise/blob/main/CONTRIBUTING.md#-env-variables) @@ -209,9 +194,9 @@ Flowise 支持不同的環境變量來配置您的實例。您可以在 `package -請參閱 [貢獻指南](CONTRIBUTING.md)。如果您有任何問題或問題,請通過 [Discord](https://discord.gg/jbaHfsRVBW) 與我們聯繫。 +請參閱 [貢獻指南](../CONTRIBUTING.md)。如果您有任何問題或問題,請通過 [Discord](https://discord.gg/jbaHfsRVBW) 與我們聯繫。 [![Star History Chart](https://api.star-history.com/svg?repos=FlowiseAI/Flowise&type=Timeline)](https://star-history.com/#FlowiseAI/Flowise&Date) ## 📄 許可證 -此存儲庫中的源代碼根據 [Apache 許可證版本 2.0](LICENSE.md) 提供。 +此存儲庫中的源代碼根據 [Apache 許可證版本 2.0](../LICENSE.md) 提供。 diff --git a/i18n/README-ZH.md b/i18n/README-ZH.md index 5f313fb3248..d744d739272 100644 --- a/i18n/README-ZH.md +++ b/i18n/README-ZH.md @@ -31,12 +31,6 @@ npx flowise start ``` - 使用用户名和密码 - - ```bash - npx flowise start --FLOWISE_USERNAME=user --FLOWISE_PASSWORD=1234 - ``` - 3. 打开 [http://localhost:3000](http://localhost:3000) ## 🐳 Docker @@ -127,15 +121,6 @@ Flowise 在一个单一的代码库中有 3 个不同的模块。 任何代码更改都会自动重新加载应用程序,访问 [http://localhost:8080](http://localhost:8080) -## 🔒 认证 - -要启用应用程序级身份验证,在 `packages/server` 的 `.env` 文件中添加 `FLOWISE_USERNAME` 和 `FLOWISE_PASSWORD`: - -``` -FLOWISE_USERNAME=user -FLOWISE_PASSWORD=1234 -``` - ## 🌱 环境变量 Flowise 支持不同的环境变量来配置您的实例。您可以在 `packages/server` 文件夹中的 `.env` 文件中指定以下变量。了解更多信息,请阅读[文档](https://github.com/FlowiseAI/Flowise/blob/main/CONTRIBUTING.md#-env-variables) @@ -197,8 +182,8 @@ Flowise 支持不同的环境变量来配置您的实例。您可以在 `package -参见[贡献指南](CONTRIBUTING.md)。如果您有任何问题或问题,请在[Discord](https://discord.gg/jbaHfsRVBW)上与我们联系。 +参见[贡献指南](CONTRIBUTING-ZH.md)。如果您有任何问题或问题,请在[Discord](https://discord.gg/jbaHfsRVBW)上与我们联系。 ## 📄 许可证 -此代码库中的源代码在[Apache License Version 2.0 许可证](LICENSE.md)下提供。 +此代码库中的源代码在[Apache License Version 2.0 许可证](../LICENSE.md)下提供。 diff --git a/package.json b/package.json index f7855fef5bd..f0a83893714 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "flowise", - "version": "3.0.0", + "version": "3.0.1", "private": true, "homepage": "https://flowiseai.com", "workspaces": [ @@ -20,6 +20,7 @@ "start-worker": "run-script-os", "start-worker:windows": "cd packages/server/bin && run worker", "start-worker:default": "cd packages/server/bin && ./run worker", + "test": "turbo run test", "clean": "pnpm --filter \"./packages/**\" clean", "nuke": "pnpm --filter \"./packages/**\" nuke && rimraf node_modules .turbo", "format": "prettier --write \"**/*.{ts,tsx,md}\"", diff --git a/packages/components/evaluation/EvaluationRunTracer.ts b/packages/components/evaluation/EvaluationRunTracer.ts new file mode 100644 index 00000000000..ce286eb5227 --- /dev/null +++ b/packages/components/evaluation/EvaluationRunTracer.ts @@ -0,0 +1,165 @@ +import { RunCollectorCallbackHandler } from '@langchain/core/tracers/run_collector' +import { Run } from '@langchain/core/tracers/base' +import { EvaluationRunner } from './EvaluationRunner' +import { encoding_for_model, get_encoding } from '@dqbd/tiktoken' + +export class EvaluationRunTracer extends RunCollectorCallbackHandler { + evaluationRunId: string + model: string + + constructor(id: string) { + super() + this.evaluationRunId = id + } 
+ + async persistRun(run: Run): Promise<void> { + return super.persistRun(run) + } + + countPromptTokens = (encoding: any, run: Run): number => { + let promptTokenCount = 0 + if (encoding) { + if (run.inputs?.messages?.length > 0 && run.inputs?.messages[0]?.length > 0) { + run.inputs.messages[0].map((message: any) => { + let content = message.content + ? message.content + : message.SystemMessage?.content + ? message.SystemMessage.content + : message.HumanMessage?.content + ? message.HumanMessage.content + : message.AIMessage?.content + ? message.AIMessage.content + : undefined + promptTokenCount += content ? encoding.encode(content).length : 0 + }) + } + if (run.inputs?.prompts?.length > 0) { + const content = run.inputs.prompts[0] + promptTokenCount += content ? encoding.encode(content).length : 0 + } + } + return promptTokenCount + } + + countCompletionTokens = (encoding: any, run: Run): number => { + let completionTokenCount = 0 + if (encoding) { + if (run.outputs?.generations?.length > 0 && run.outputs?.generations[0]?.length > 0) { + run.outputs?.generations[0].map((chunk: any) => { + let content = chunk.text ? chunk.text : chunk.message?.content ? chunk.message?.content : undefined + completionTokenCount += content ? encoding.encode(content).length : 0 + }) + } + } + return completionTokenCount + } + + extractModelName = (run: Run): string => { + return ( + (run?.serialized as any)?.kwargs?.model || + (run?.serialized as any)?.kwargs?.model_name || + (run?.extra as any)?.metadata?.ls_model_name || + (run?.extra as any)?.metadata?.fw_model_name + ) + } + + onLLMEnd?(run: Run): void | Promise<void> { + if (run.name) { + let provider = run.name + if (provider === 'BedrockChat') { + provider = 'awsChatBedrock' + } + EvaluationRunner.addMetrics( + this.evaluationRunId, + JSON.stringify({ + provider: provider + }) + ) + } + + let model = this.extractModelName(run) + if (run.outputs?.llmOutput?.tokenUsage) { + const tokenUsage = run.outputs?.llmOutput?.tokenUsage + if (tokenUsage) { + const metric = { + completionTokens: tokenUsage.completionTokens, + promptTokens: tokenUsage.promptTokens, + model: model, + totalTokens: tokenUsage.totalTokens + } + EvaluationRunner.addMetrics(this.evaluationRunId, JSON.stringify(metric)) + } + } else if ( + run.outputs?.generations?.length > 0 && + run.outputs?.generations[0].length > 0 && + run.outputs?.generations[0][0]?.message?.usage_metadata?.total_tokens + ) { + const usage_metadata = run.outputs?.generations[0][0]?.message?.usage_metadata + if (usage_metadata) { + const metric = { + completionTokens: usage_metadata.output_tokens, + promptTokens: usage_metadata.input_tokens, + model: model || this.model, + totalTokens: usage_metadata.total_tokens + } + EvaluationRunner.addMetrics(this.evaluationRunId, JSON.stringify(metric)) + } + } else { + let encoding: any = undefined + let promptInputTokens = 0 + let completionTokenCount = 0 + try { + encoding = encoding_for_model(model as any) + promptInputTokens = this.countPromptTokens(encoding, run) + completionTokenCount = this.countCompletionTokens(encoding, run) + } catch (e) { + try { + // as tiktoken will fail for non openai models, assume that is 'cl100k_base' + encoding = get_encoding('cl100k_base') + promptInputTokens = this.countPromptTokens(encoding, run) + completionTokenCount = this.countCompletionTokens(encoding, run) + } catch (e) { + // stay silent + } + } + const metric = { + completionTokens: completionTokenCount, + promptTokens: promptInputTokens, + model: model, + totalTokens: promptInputTokens +
completionTokenCount + } + EvaluationRunner.addMetrics(this.evaluationRunId, JSON.stringify(metric)) + //cleanup + this.model = '' + } + } + + async onRunUpdate(run: Run): Promise { + const json = { + [run.run_type]: elapsed(run) + } + let metric = JSON.stringify(json) + if (metric) { + EvaluationRunner.addMetrics(this.evaluationRunId, metric) + } + + if (run.run_type === 'llm') { + let model = this.extractModelName(run) + if (model) { + EvaluationRunner.addMetrics(this.evaluationRunId, JSON.stringify({ model: model })) + this.model = model + } + // OpenAI non streaming models + const estimatedTokenUsage = run.outputs?.llmOutput?.estimatedTokenUsage + if (estimatedTokenUsage && typeof estimatedTokenUsage === 'object' && Object.keys(estimatedTokenUsage).length > 0) { + EvaluationRunner.addMetrics(this.evaluationRunId, estimatedTokenUsage) + } + } + } +} + +function elapsed(run: Run) { + if (!run.end_time) return '' + const elapsed = run.end_time - run.start_time + return `${elapsed.toFixed(2)}` +} diff --git a/packages/components/evaluation/EvaluationRunTracerLlama.ts b/packages/components/evaluation/EvaluationRunTracerLlama.ts new file mode 100644 index 00000000000..872b16e3510 --- /dev/null +++ b/packages/components/evaluation/EvaluationRunTracerLlama.ts @@ -0,0 +1,186 @@ +import { ChatMessage, LLMEndEvent, LLMStartEvent, LLMStreamEvent, MessageContentTextDetail, RetrievalEndEvent, Settings } from 'llamaindex' +import { EvaluationRunner } from './EvaluationRunner' +import { additionalCallbacks, ICommonObject, INodeData } from '../src' +import { RetrievalStartEvent } from 'llamaindex/dist/type/llm/types' +import { AgentEndEvent, AgentStartEvent } from 'llamaindex/dist/type/agent/types' +import { encoding_for_model } from '@dqbd/tiktoken' +import { MessageContent } from '@langchain/core/messages' + +export class EvaluationRunTracerLlama { + evaluationRunId: string + static cbInit = false + static startTimes = new Map() + static models = new Map() + static tokenCounts = new Map() + + constructor(id: string) { + this.evaluationRunId = id + EvaluationRunTracerLlama.constructCallBacks() + } + + static constructCallBacks = () => { + if (!EvaluationRunTracerLlama.cbInit) { + Settings.callbackManager.on('llm-start', (event: LLMStartEvent) => { + const evalID = (event as any).reason.parent?.caller?.evaluationRunId || (event as any).reason.caller?.evaluationRunId + if (!evalID) return + const model = (event as any).reason?.caller?.model + if (model) { + EvaluationRunTracerLlama.models.set(evalID, model) + try { + const encoding = encoding_for_model(model) + if (encoding) { + const { messages } = event.detail.payload + let tokenCount = messages.reduce((count: number, message: ChatMessage) => { + return count + encoding.encode(extractText(message.content)).length + }, 0) + EvaluationRunTracerLlama.tokenCounts.set(evalID + '_promptTokens', tokenCount) + EvaluationRunTracerLlama.tokenCounts.set(evalID + '_outputTokens', 0) + } + } catch (e) { + // catch the error and continue to work. 
+ } + } + EvaluationRunTracerLlama.startTimes.set(evalID + '_llm', event.timeStamp) + }) + Settings.callbackManager.on('llm-end', (event: LLMEndEvent) => { + this.calculateAndSetMetrics(event, 'llm') + }) + Settings.callbackManager.on('llm-stream', (event: LLMStreamEvent) => { + const evalID = (event as any).reason.parent?.caller?.evaluationRunId || (event as any).reason.caller?.evaluationRunId + if (!evalID) return + const { chunk } = event.detail.payload + const { delta } = chunk + const model = (event as any).reason?.caller?.model + try { + const encoding = encoding_for_model(model) + if (encoding) { + let tokenCount = EvaluationRunTracerLlama.tokenCounts.get(evalID + '_outputTokens') || 0 + tokenCount += encoding.encode(extractText(delta)).length + EvaluationRunTracerLlama.tokenCounts.set(evalID + '_outputTokens', tokenCount) + } + } catch (e) { + // catch the error and continue to work. + } + }) + Settings.callbackManager.on('retrieve-start', (event: RetrievalStartEvent) => { + const evalID = (event as any).reason.parent?.caller?.evaluationRunId || (event as any).reason.caller?.evaluationRunId + if (evalID) { + EvaluationRunTracerLlama.startTimes.set(evalID + '_retriever', event.timeStamp) + } + }) + Settings.callbackManager.on('retrieve-end', (event: RetrievalEndEvent) => { + this.calculateAndSetMetrics(event, 'retriever') + }) + Settings.callbackManager.on('agent-start', (event: AgentStartEvent) => { + const evalID = (event as any).reason.parent?.caller?.evaluationRunId || (event as any).reason.caller?.evaluationRunId + if (evalID) { + EvaluationRunTracerLlama.startTimes.set(evalID + '_agent', event.timeStamp) + } + }) + Settings.callbackManager.on('agent-end', (event: AgentEndEvent) => { + this.calculateAndSetMetrics(event, 'agent') + }) + EvaluationRunTracerLlama.cbInit = true + } + } + + private static calculateAndSetMetrics(event: any, label: string) { + const evalID = event.reason.parent?.caller?.evaluationRunId || event.reason.caller?.evaluationRunId + if (!evalID) return + const startTime = EvaluationRunTracerLlama.startTimes.get(evalID + '_' + label) as number + let model = + (event as any).reason?.caller?.model || (event as any).reason?.caller?.llm?.model || EvaluationRunTracerLlama.models.get(evalID) + + if (event.detail.payload?.response?.message && model) { + try { + const encoding = encoding_for_model(model) + if (encoding) { + let tokenCount = EvaluationRunTracerLlama.tokenCounts.get(evalID + '_outputTokens') || 0 + tokenCount += encoding.encode(event.detail.payload.response?.message?.content || '').length + EvaluationRunTracerLlama.tokenCounts.set(evalID + '_outputTokens', tokenCount) + } + } catch (e) { + // catch the error and continue to work. 
+ } + } + + // Anthropic + if (event.detail?.payload?.response?.raw?.usage) { + const usage = event.detail.payload.response.raw.usage + if (usage.output_tokens) { + const metric = { + completionTokens: usage.output_tokens, + promptTokens: usage.input_tokens, + model: model, + totalTokens: usage.input_tokens + usage.output_tokens + } + EvaluationRunner.addMetrics(evalID, JSON.stringify(metric)) + } else if (usage.completion_tokens) { + const metric = { + completionTokens: usage.completion_tokens, + promptTokens: usage.prompt_tokens, + model: model, + totalTokens: usage.total_tokens + } + EvaluationRunner.addMetrics(evalID, JSON.stringify(metric)) + } + } else if (event.detail?.payload?.response?.raw['amazon-bedrock-invocationMetrics']) { + const usage = event.detail?.payload?.response?.raw['amazon-bedrock-invocationMetrics'] + const metric = { + completionTokens: usage.outputTokenCount, + promptTokens: usage.inputTokenCount, + model: event.detail?.payload?.response?.raw.model, + totalTokens: usage.inputTokenCount + usage.outputTokenCount + } + EvaluationRunner.addMetrics(evalID, JSON.stringify(metric)) + } else { + const metric = { + [label]: (event.timeStamp - startTime).toFixed(2), + completionTokens: EvaluationRunTracerLlama.tokenCounts.get(evalID + '_outputTokens'), + promptTokens: EvaluationRunTracerLlama.tokenCounts.get(evalID + '_promptTokens'), + model: model || EvaluationRunTracerLlama.models.get(evalID) || '', + totalTokens: + (EvaluationRunTracerLlama.tokenCounts.get(evalID + '_outputTokens') || 0) + + (EvaluationRunTracerLlama.tokenCounts.get(evalID + '_promptTokens') || 0) + } + EvaluationRunner.addMetrics(evalID, JSON.stringify(metric)) + } + + //cleanup + EvaluationRunTracerLlama.startTimes.delete(evalID + '_' + label) + EvaluationRunTracerLlama.startTimes.delete(evalID + '_outputTokens') + EvaluationRunTracerLlama.startTimes.delete(evalID + '_promptTokens') + EvaluationRunTracerLlama.models.delete(evalID) + } + + static async injectEvaluationMetadata(nodeData: INodeData, options: ICommonObject, callerObj: any) { + if (options.evaluationRunId && callerObj) { + // these are needed for evaluation runs + options.llamaIndex = true + await additionalCallbacks(nodeData, options) + Object.defineProperty(callerObj, 'evaluationRunId', { + enumerable: true, + configurable: true, + writable: true, + value: options.evaluationRunId + }) + } + } +} + +// from https://github.com/run-llama/LlamaIndexTS/blob/main/packages/core/src/llm/utils.ts +export function extractText(message: MessageContent): string { + if (typeof message !== 'string' && !Array.isArray(message)) { + console.warn('extractText called with non-MessageContent message, this is likely a bug.') + return `${message}` + } else if (typeof message !== 'string' && Array.isArray(message)) { + // message is of type MessageContentDetail[] - retrieve just the text parts and concatenate them + // so we can pass them to the context generator + return message + .filter((c): c is MessageContentTextDetail => c.type === 'text') + .map((c) => c.text) + .join('\n\n') + } else { + return message + } +} diff --git a/packages/components/evaluation/EvaluationRunner.ts b/packages/components/evaluation/EvaluationRunner.ts new file mode 100644 index 00000000000..73f22b70c37 --- /dev/null +++ b/packages/components/evaluation/EvaluationRunner.ts @@ -0,0 +1,172 @@ +import axios from 'axios' +import { v4 as uuidv4 } from 'uuid' +import { ICommonObject } from '../src' + +import { getModelConfigByModelName, MODEL_TYPE } from '../src/modelLoader' + +export 
class EvaluationRunner { + static metrics = new Map() + static async getAndDeleteMetrics(id: string) { + const val = EvaluationRunner.metrics.get(id) + if (val) { + try { + //first lets get the provider and model + let selectedModel = undefined + let selectedProvider = undefined + if (val && val.length > 0) { + let modelName = '' + let providerName = '' + for (let i = 0; i < val.length; i++) { + const metric = val[i] + if (typeof metric === 'object') { + modelName = metric['model'] + providerName = metric['provider'] + } else { + modelName = JSON.parse(metric)['model'] + providerName = JSON.parse(metric)['provider'] + } + + if (modelName) { + selectedModel = modelName + } + if (providerName) { + selectedProvider = providerName + } + } + } + let modelConfig = await getModelConfigByModelName(MODEL_TYPE.CHAT, selectedProvider, selectedModel) + if (modelConfig) { + val.push(JSON.stringify({ cost_values: modelConfig })) + } else { + modelConfig = await getModelConfigByModelName(MODEL_TYPE.LLM, selectedProvider, selectedModel) + if (modelConfig) { + val.push(JSON.stringify({ cost_values: modelConfig })) + } + } + } catch (error) { + //stay silent + } + } + EvaluationRunner.metrics.delete(id) + return val + } + + static addMetrics(id: string, metric: string) { + if (EvaluationRunner.metrics.has(id)) { + EvaluationRunner.metrics.get(id)?.push(metric) + } else { + EvaluationRunner.metrics.set(id, [metric]) + } + } + + baseURL = '' + + constructor(baseURL: string) { + this.baseURL = baseURL + } + + getChatflowApiKey(chatflowId: string, apiKeys: { chatflowId: string; apiKey: string }[] = []) { + return apiKeys.find((item) => item.chatflowId === chatflowId)?.apiKey || '' + } + + public async runEvaluations(data: ICommonObject) { + const chatflowIds = JSON.parse(data.chatflowId) + const returnData: ICommonObject = {} + returnData.evaluationId = data.evaluationId + returnData.runDate = new Date() + returnData.rows = [] + for (let i = 0; i < data.dataset.rows.length; i++) { + returnData.rows.push({ + input: data.dataset.rows[i].input, + expectedOutput: data.dataset.rows[i].output, + itemNo: data.dataset.rows[i].sequenceNo, + evaluations: [], + status: 'pending' + }) + } + for (let i = 0; i < chatflowIds.length; i++) { + const chatflowId = chatflowIds[i] + await this.evaluateChatflow(chatflowId, this.getChatflowApiKey(chatflowId, data.apiKeys), data, returnData) + } + return returnData + } + + async evaluateChatflow(chatflowId: string, apiKey: string, data: any, returnData: any) { + for (let i = 0; i < data.dataset.rows.length; i++) { + const item = data.dataset.rows[i] + const uuid = uuidv4() + + const headers: any = { + 'X-Request-ID': uuid, + 'X-Flowise-Evaluation': 'true' + } + if (apiKey) { + headers['Authorization'] = `Bearer ${apiKey}` + } + let axiosConfig = { + headers: headers + } + let startTime = performance.now() + const runData: any = {} + runData.chatflowId = chatflowId + runData.startTime = startTime + const postData: any = { question: item.input, evaluationRunId: uuid, evaluation: true } + if (data.sessionId) { + postData.overrideConfig = { sessionId: data.sessionId } + } + try { + let response = await axios.post(`${this.baseURL}/api/v1/prediction/${chatflowId}`, postData, axiosConfig) + const endTime = performance.now() + const timeTaken = (endTime - startTime).toFixed(2) + if (response?.data?.metrics) { + runData.metrics = response.data.metrics + runData.metrics.push({ + apiLatency: timeTaken + }) + } else { + runData.metrics = [ + { + apiLatency: timeTaken + } + ] + } + runData.status 
= 'complete' + let resultText = '' + if (response.data.text) resultText = response.data.text + else if (response.data.json) resultText = '```json\n' + JSON.stringify(response.data.json, null, 2) + else resultText = JSON.stringify(response.data, null, 2) + + runData.actualOutput = resultText + runData.latency = timeTaken + runData.error = '' + } catch (error: any) { + runData.status = 'error' + runData.actualOutput = '' + runData.error = error?.response?.data?.message + ? error.response.data.message + : error?.message + ? error.message + : 'Unknown error' + try { + if (runData.error.indexOf('-') > -1) { + // if there is a dash, remove all content before + runData.error = 'Error: ' + runData.error.substr(runData.error.indexOf('-') + 1).trim() + } + } catch (error) { + //stay silent + } + const endTime = performance.now() + const timeTaken = (endTime - startTime).toFixed(2) + runData.metrics = [ + { + apiLatency: timeTaken + } + ] + runData.latency = timeTaken + } + runData.uuid = uuid + returnData.rows[i].evaluations.push(runData) + } + return returnData + } +} diff --git a/packages/components/models.json b/packages/components/models.json index 47041981c2d..6ec2bb4ce8a 100644 --- a/packages/components/models.json +++ b/packages/components/models.json @@ -3,52 +3,63 @@ { "name": "awsChatBedrock", "models": [ + { + "label": "anthropic.claude-sonnet-4-20250514-v1:0", + "name": "anthropic.claude-sonnet-4-20250514-v1:0", + "description": "Claude 4 Sonnet", + "input_cost": 0.000003, + "output_cost": 0.000015 + }, + { + "label": "anthropic.claude-opus-4-20250514-v1:0", + "name": "anthropic.claude-opus-4-20250514-v1:0", + "description": "Claude 4 Opus", + "input_cost": 0.000015, + "output_cost": 0.000075 + }, { "label": "anthropic.claude-3-7-sonnet-20250219-v1:0", "name": "anthropic.claude-3-7-sonnet-20250219-v1:0", - "description": "(20250219-v1:0) specific version of Claude Sonnet 3.7 - hybrid reasoning model", + "description": "(20250219-v1:0) specific version of Claude Sonnet 3.7", "input_cost": 3e-6, "output_cost": 0.000015 }, { "label": "anthropic.claude-3-5-haiku-20241022-v1:0", "name": "anthropic.claude-3-5-haiku-20241022-v1:0", - "description": "(20241022-v1:0) specific version of Claude Haiku 3.5 - fastest model", + "description": "(20241022-v1:0) specific version of Claude Haiku 3.5", "input_cost": 8e-7, "output_cost": 4e-6 }, { "label": "anthropic.claude-3.5-sonnet-20241022-v2:0", "name": "anthropic.claude-3-5-sonnet-20241022-v2:0", - "description": "(20241022-v2:0) specific version of Claude Sonnet 3.5 - most intelligent model", + "description": "(20241022-v2:0) specific version of Claude Sonnet 3.5", "input_cost": 3e-6, "output_cost": 0.000015 }, { "label": "anthropic.claude-3.5-sonnet-20240620-v1:0", "name": "anthropic.claude-3.5-sonnet-20240620-v1:0", - "description": "(20240620-v1:0) specific version of Claude Sonnet 3.5 - most intelligent model", + "description": "(20240620-v1:0) specific version of Claude Sonnet 3.5", "input_cost": 3e-6, "output_cost": 0.000015 }, { "label": "anthropic.claude-3-opus", "name": "anthropic.claude-3-opus-20240229-v1:0", - "description": "Powerful model for highly complex tasks, reasoning and analysis", "input_cost": 0.000015, "output_cost": 0.000075 }, { "label": "anthropic.claude-3-sonnet", "name": "anthropic.claude-3-sonnet-20240229-v1:0", - "description": "Balance of intelligence and speed", "input_cost": 0.000003, "output_cost": 0.000015 }, { "label": "anthropic.claude-3-haiku", "name": "anthropic.claude-3-haiku-20240307-v1:0", - "description": 
"Fastest and most compact model for near-instant responsiveness", "input_cost": 2.5e-7, "output_cost": 1.25e-6 }, @@ -411,38 +422,38 @@ { "name": "chatAnthropic", "models": [ + { + "label": "claude-sonnet-4-0", + "name": "claude-sonnet-4-0", + "description": "Claude 4 Sonnet", + "input_cost": 0.000003, + "output_cost": 0.000015 + }, + { + "label": "claude-opus-4-0", + "name": "claude-opus-4-0", + "description": "Claude 4 Opus", + "input_cost": 0.000015, + "output_cost": 0.000075 + }, { "label": "claude-3-7-sonnet-latest", "name": "claude-3-7-sonnet-latest", - "description": "Most recent snapshot version of Claude Sonnet 3.7 model - hybrid reasoning model", + "description": "Most recent snapshot version of Claude Sonnet 3.7", "input_cost": 3e-6, "output_cost": 0.000015 }, { "label": "claude-3-5-haiku-latest", "name": "claude-3-5-haiku-latest", - "description": "Most recent snapshot version of Claude Haiku 3.5 - fastest model", + "description": "Most recent snapshot version of Claude Haiku 3.5", "input_cost": 8e-7, "output_cost": 4e-6 }, { "label": "claude-3.5-sonnet-latest", "name": "claude-3-5-sonnet-latest", - "description": "Most recent snapshot version of Claude Sonnet 3.5 model - most intelligent model", - "input_cost": 3e-6, - "output_cost": 0.000015 - }, - { - "label": "claude-3.5-sonnet-20241022", - "name": "claude-3-5-sonnet-20241022", - "description": "(20241022) specific version of Claude Sonnet 3.5 - most intelligent model", - "input_cost": 3e-6, - "output_cost": 0.000015 - }, - { - "label": "claude-3.5-sonnet-20240620", - "name": "claude-3-5-sonnet-20240620", - "description": "(20240620) specific version of Claude Sonnet 3.5 - most intelligent model", + "description": "Most recent snapshot version of Claude Sonnet 3.5 model", "input_cost": 3e-6, "output_cost": 0.000015 }, @@ -466,27 +477,6 @@ "description": "Fastest and most compact model, designed for near-instant responsiveness", "input_cost": 2.5e-7, "output_cost": 1.25e-6 - }, - { - "label": "claude-2.0 (legacy)", - "name": "claude-2.0", - "description": "Claude 2 latest major version, automatically get updates to the model as they are released", - "input_cost": 0.000008, - "output_cost": 0.000024 - }, - { - "label": "claude-2.1 (legacy)", - "name": "claude-2.1", - "description": "Claude 2 latest full version", - "input_cost": 0.000008, - "output_cost": 0.000024 - }, - { - "label": "claude-instant-1.2 (legacy)", - "name": "claude-instant-1.2", - "description": "Claude Instant latest major version, automatically get updates to the model as they are released", - "input_cost": 0.000008, - "output_cost": 0.000024 } ] }, @@ -533,6 +523,12 @@ { "name": "chatGoogleGenerativeAI", "models": [ + { + "label": "gemini-2.5-flash-preview-05-20", + "name": "gemini-2.5-flash-preview-05-20", + "input_cost": 0.15e-6, + "output_cost": 6e-7 + }, { "label": "gemini-2.5-pro-preview-03-25", "name": "gemini-2.5-pro-preview-03-25", @@ -621,24 +617,38 @@ "input_cost": 1.25e-7, "output_cost": 3.75e-7 }, + { + "label": "claude-sonnet-4@20250514", + "name": "claude-sonnet-4@20250514", + "description": "Claude 4 Sonnet", + "input_cost": 0.000003, + "output_cost": 0.000015 + }, + { + "label": "claude-opus-4@20250514", + "name": "claude-opus-4@20250514", + "description": "Claude 4 Opus", + "input_cost": 0.000015, + "output_cost": 0.000075 + }, { "label": "claude-3-7-sonnet@20250219", "name": "claude-3-7-sonnet@20250219", - "description": "(20250219-v1:0) specific version of Claude Sonnet 3.7 - hybrid reasoning model", + "description": "(20250219-v1:0) 
specific version of Claude Sonnet 3.7", "input_cost": 3e-6, "output_cost": 0.000015 }, { "label": "claude-3-5-haiku@20241022", "name": "claude-3-5-haiku@20241022", - "description": "(20241022-v1:0) specific version of Claude Haiku 3.5 - fastest model", + "description": "(20241022-v1:0) specific version of Claude Haiku 3.5", "input_cost": 8e-7, "output_cost": 4e-6 }, { "label": "claude-3-5-sonnet-v2@20241022", "name": "claude-3-5-sonnet-v2@20241022", - "description": "(20241022-v2:0) specific version of Claude Sonnet 3.5 - most intelligent model", + "description": "(20241022-v2:0) specific version of Claude Sonnet 3.5", "input_cost": 3e-6, "output_cost": 0.000015 }, @@ -1207,6 +1217,18 @@ "name": "mistral-large-2402", "input_cost": 0.002, "output_cost": 0.006 + }, + { + "label": "codestral-latest", + "name": "codestral-latest", + "input_cost": 0.0002, + "output_cost": 0.0006 + }, + { + "label": "devstral-small-2505", + "name": "devstral-small-2505", + "input_cost": 0.0001, + "output_cost": 0.0003 + } ] }, diff --git a/packages/components/nodes/agentflow/Agent/Agent.ts b/packages/components/nodes/agentflow/Agent/Agent.ts index 8cfdd6a46d9..55f565d0e8e 100644 --- a/packages/components/nodes/agentflow/Agent/Agent.ts +++ b/packages/components/nodes/agentflow/Agent/Agent.ts @@ -427,7 +427,8 @@ class Agent_Agentflow implements INode { return returnData } - const stores = await appDataSource.getRepository(databaseEntities['DocumentStore']).find() + const searchOptions = options.searchOptions || {} + const stores = await appDataSource.getRepository(databaseEntities['DocumentStore']).findBy(searchOptions) for (const store of stores) { if (store.status === 'UPSERTED') { const obj = { @@ -755,7 +756,7 @@ class Agent_Agentflow implements INode { /* * If this is the first node: * - Add images to messages if exist - * - Add user message + * - Add user message if it does not exist in the agentMessages array */ if (options.uploads) { const imageContents = await getUniqueImageMessages(options, messages, modelConfig) @@ -766,7 +767,7 @@ class Agent_Agentflow implements INode { } } - if (input && typeof input === 'string') { + if (input && typeof input === 'string' && !agentMessages.some((msg) => msg.role === 'user')) { messages.push({ role: 'user', content: input @@ -976,7 +977,19 @@ class Agent_Agentflow implements INode { inputMessages.push(...runtimeImageMessagesWithFileRef) } if (input && typeof input === 'string') { - inputMessages.push({ role: 'user', content: input }) + if (!enableMemory) { + if (!agentMessages.some((msg) => msg.role === 'user')) { + inputMessages.push({ role: 'user', content: input }) + } else { + agentMessages.map((msg) => { + if (msg.role === 'user') { + inputMessages.push({ role: 'user', content: msg.content }) + } + }) + } + } else { + inputMessages.push({ role: 'user', content: input }) + } } } @@ -1442,6 +1455,8 @@ class Agent_Agentflow implements INode { toolOutput: '', error: getErrorMessage(e) }) + sseStreamer?.streamUsedToolsEvent(chatId, flatten(usedTools)) + throw new Error(getErrorMessage(e)) } } } @@ -1678,6 +1693,8 @@ class Agent_Agentflow implements INode { toolOutput: '', error: getErrorMessage(e) }) + sseStreamer?.streamUsedToolsEvent(chatId, flatten(usedTools)) + throw new Error(getErrorMessage(e)) } } } diff --git a/packages/components/nodes/agentflow/CustomFunction/CustomFunction.ts b/packages/components/nodes/agentflow/CustomFunction/CustomFunction.ts index 6922c651bb5..f5d4bb615cd 100644 --- a/packages/components/nodes/agentflow/CustomFunction/CustomFunction.ts
+++ b/packages/components/nodes/agentflow/CustomFunction/CustomFunction.ts @@ -152,12 +152,13 @@ class CustomFunction_Agentflow implements INode { newState = updateFlowState(state, _customFunctionUpdateState) } - const variables = await getVars(appDataSource, databaseEntities, nodeData) + const variables = await getVars(appDataSource, databaseEntities, nodeData, options) const flow = { chatflowId: options.chatflowid, sessionId: options.sessionId, chatId: options.chatId, - input + input, + state: newState } let sandbox: any = { diff --git a/packages/components/nodes/agentflow/ExecuteFlow/ExecuteFlow.ts b/packages/components/nodes/agentflow/ExecuteFlow/ExecuteFlow.ts index 26e5df7b60e..b1f62bb9389 100644 --- a/packages/components/nodes/agentflow/ExecuteFlow/ExecuteFlow.ts +++ b/packages/components/nodes/agentflow/ExecuteFlow/ExecuteFlow.ts @@ -127,7 +127,8 @@ class ExecuteFlow_Agentflow implements INode { return returnData } - const chatflows = await appDataSource.getRepository(databaseEntities['ChatFlow']).find() + const searchOptions = options.searchOptions || {} + const chatflows = await appDataSource.getRepository(databaseEntities['ChatFlow']).findBy(searchOptions) for (let i = 0; i < chatflows.length; i += 1) { let cfType = 'Chatflow' diff --git a/packages/components/nodes/agentflow/HTTP/HTTP.ts b/packages/components/nodes/agentflow/HTTP/HTTP.ts index 752d6dd0bcb..0027f1e2bcd 100644 --- a/packages/components/nodes/agentflow/HTTP/HTTP.ts +++ b/packages/components/nodes/agentflow/HTTP/HTTP.ts @@ -21,7 +21,7 @@ class HTTP_Agentflow implements INode { constructor() { this.label = 'HTTP' this.name = 'httpAgentflow' - this.version = 1.0 + this.version = 1.1 this.type = 'HTTP' this.category = 'Agent Flows' this.description = 'Send a HTTP request' @@ -72,6 +72,7 @@ class HTTP_Agentflow implements INode { label: 'Headers', name: 'headers', type: 'array', + acceptVariable: true, array: [ { label: 'Key', @@ -83,7 +84,8 @@ class HTTP_Agentflow implements INode { label: 'Value', name: 'value', type: 'string', - default: '' + default: '', + acceptVariable: true } ], optional: true @@ -92,6 +94,7 @@ class HTTP_Agentflow implements INode { label: 'Query Params', name: 'queryParams', type: 'array', + acceptVariable: true, array: [ { label: 'Key', @@ -103,7 +106,8 @@ class HTTP_Agentflow implements INode { label: 'Value', name: 'value', type: 'string', - default: '' + default: '', + acceptVariable: true } ], optional: true @@ -147,6 +151,7 @@ class HTTP_Agentflow implements INode { label: 'Body', name: 'body', type: 'array', + acceptVariable: true, show: { bodyType: ['xWwwFormUrlencoded', 'formData'] }, @@ -161,7 +166,8 @@ class HTTP_Agentflow implements INode { label: 'Value', name: 'value', type: 'string', - default: '' + default: '', + acceptVariable: true } ], optional: true @@ -220,14 +226,14 @@ class HTTP_Agentflow implements INode { // Add credentials if provided const credentialData = await getCredentialData(nodeData.credential ?? 
'', options) if (credentialData && Object.keys(credentialData).length !== 0) { - const basicAuthUsername = getCredentialParam('username', credentialData, nodeData) - const basicAuthPassword = getCredentialParam('password', credentialData, nodeData) + const basicAuthUsername = getCredentialParam('basicAuthUsername', credentialData, nodeData) + const basicAuthPassword = getCredentialParam('basicAuthPassword', credentialData, nodeData) const bearerToken = getCredentialParam('token', credentialData, nodeData) const apiKeyName = getCredentialParam('key', credentialData, nodeData) const apiKeyValue = getCredentialParam('value', credentialData, nodeData) // Determine which type of auth to use based on available credentials - if (basicAuthUsername && basicAuthPassword) { + if (basicAuthUsername || basicAuthPassword) { // Basic Auth const auth = Buffer.from(`${basicAuthUsername}:${basicAuthPassword}`).toString('base64') requestHeaders['Authorization'] = `Basic ${auth}` diff --git a/packages/components/nodes/agentflow/LLM/LLM.ts b/packages/components/nodes/agentflow/LLM/LLM.ts index 18f8d187d1a..8b400e7e056 100644 --- a/packages/components/nodes/agentflow/LLM/LLM.ts +++ b/packages/components/nodes/agentflow/LLM/LLM.ts @@ -410,7 +410,7 @@ class LLM_Agentflow implements INode { /* * If this is the first node: * - Add images to messages if exist - * - Add user message + * - Add user message if it does not exist in the llmMessages array */ if (options.uploads) { const imageContents = await getUniqueImageMessages(options, messages, modelConfig) @@ -421,7 +421,7 @@ class LLM_Agentflow implements INode { } } - if (input && typeof input === 'string') { + if (input && typeof input === 'string' && !llmMessages.some((msg) => msg.role === 'user')) { messages.push({ role: 'user', content: input @@ -545,7 +545,19 @@ class LLM_Agentflow implements INode { inputMessages.push(...runtimeImageMessagesWithFileRef) } if (input && typeof input === 'string') { - inputMessages.push({ role: 'user', content: input }) + if (!enableMemory) { + if (!llmMessages.some((msg) => msg.role === 'user')) { + inputMessages.push({ role: 'user', content: input }) + } else { + llmMessages.map((msg) => { + if (msg.role === 'user') { + inputMessages.push({ role: 'user', content: msg.content }) + } + }) + } + } else { + inputMessages.push({ role: 'user', content: input }) + } } } diff --git a/packages/components/nodes/agentflow/Retriever/Retriever.ts b/packages/components/nodes/agentflow/Retriever/Retriever.ts index 68420484e12..8524fcd12d3 100644 --- a/packages/components/nodes/agentflow/Retriever/Retriever.ts +++ b/packages/components/nodes/agentflow/Retriever/Retriever.ts @@ -119,7 +119,8 @@ class Retriever_Agentflow implements INode { return returnData } - const stores = await appDataSource.getRepository(databaseEntities['DocumentStore']).find() + const searchOptions = options.searchOptions || {} + const stores = await appDataSource.getRepository(databaseEntities['DocumentStore']).findBy(searchOptions) for (const store of stores) { if (store.status === 'UPSERTED') { const obj = { diff --git a/packages/components/nodes/agentflow/Start/Start.ts b/packages/components/nodes/agentflow/Start/Start.ts index 5f6bf8449c1..833e3b7c2eb 100644 --- a/packages/components/nodes/agentflow/Start/Start.ts +++ b/packages/components/nodes/agentflow/Start/Start.ts @@ -18,7 +18,7 @@ class Start_Agentflow implements INode { constructor() { this.label = 'Start' this.name = 'startAgentflow' - this.version = 1.0 + this.version = 1.1 this.type = 'Start' 
this.category = 'Agent Flows' this.description = 'Starting point of the agentflow' @@ -153,6 +153,13 @@ class Start_Agentflow implements INode { optional: true } ] + }, + { + label: 'Persist State', + name: 'startPersistState', + type: 'boolean', + description: 'Persist the state in the same session', + optional: true } ] } @@ -161,6 +168,7 @@ class Start_Agentflow implements INode { const _flowState = nodeData.inputs?.startState as string const startInputType = nodeData.inputs?.startInputType as string const startEphemeralMemory = nodeData.inputs?.startEphemeralMemory as boolean + const startPersistState = nodeData.inputs?.startPersistState as boolean let flowStateArray = [] if (_flowState) { @@ -176,6 +184,13 @@ class Start_Agentflow implements INode { flowState[state.key] = state.value } + const runtimeState = options.agentflowRuntime?.state as ICommonObject + if (startPersistState === true && runtimeState && Object.keys(runtimeState).length) { + for (const state in runtimeState) { + flowState[state] = runtimeState[state] + } + } + const inputData: ICommonObject = {} const outputData: ICommonObject = {} @@ -202,6 +217,10 @@ class Start_Agentflow implements INode { outputData.ephemeralMemory = true } + if (startPersistState) { + outputData.persistState = true + } + const returnOutput = { id: nodeData.id, name: this.name, diff --git a/packages/components/nodes/agentflow/utils.ts b/packages/components/nodes/agentflow/utils.ts index 1df71bcbcee..7ab5dc71dd6 100644 --- a/packages/components/nodes/agentflow/utils.ts +++ b/packages/components/nodes/agentflow/utils.ts @@ -18,7 +18,7 @@ export const addImagesToMessages = async ( for (const upload of imageUploads) { let bf = upload.data if (upload.type == 'stored-file') { - const contents = await getFileFromStorage(upload.name, options.chatflowid, options.chatId) + const contents = await getFileFromStorage(upload.name, options.orgId, options.chatflowid, options.chatId) // as the image is stored in the server, read the file and convert it to base64 bf = 'data:' + upload.mime + ';base64,' + contents.toString('base64') @@ -90,7 +90,7 @@ export const processMessagesWithImages = async ( hasImageReferences = true try { // Get file contents from storage - const contents = await getFileFromStorage(item.name, options.chatflowid, options.chatId) + const contents = await getFileFromStorage(item.name, options.orgId, options.chatflowid, options.chatId) // Create base64 data URL const base64Data = 'data:' + item.mime + ';base64,' + contents.toString('base64') @@ -140,7 +140,12 @@ export const replaceBase64ImagesWithFileReferences = ( ): BaseMessageLike[] => { // Create a deep copy to avoid mutating the original const updatedMessages = JSON.parse(JSON.stringify(messages)) - let imageMessagesIndex = 0 + + // Track positions in replacement arrays + let pastMessageIndex = 0 + let pastContentIndex = 0 + let uniqueMessageIndex = 0 + let uniqueContentIndex = 0 for (let i = 0; i < updatedMessages.length; i++) { const message = updatedMessages[i] @@ -148,17 +153,77 @@ export const replaceBase64ImagesWithFileReferences = ( for (let j = 0; j < message.content.length; j++) { const item = message.content[j] if (item.type === 'image_url') { - // Look for matching file reference in uniqueImageMessages or pastImageMessages - const imageMessage = - (uniqueImageMessages[imageMessagesIndex] as BaseMessage | undefined) || - (pastImageMessages[imageMessagesIndex] as BaseMessage | undefined) + // Try past images first + let replacement = null + + if (pastMessageIndex < 
pastImageMessages.length) { + const pastMessage = pastImageMessages[pastMessageIndex] as BaseMessage | undefined + if (pastMessage && Array.isArray(pastMessage.content)) { + if (pastContentIndex < pastMessage.content.length) { + replacement = pastMessage.content[pastContentIndex] + pastContentIndex++ + + // Move to next message if we've used all content in current one + if (pastContentIndex >= pastMessage.content.length) { + pastMessageIndex++ + pastContentIndex = 0 + } + } else { + // Current message has no more content, move to next + pastMessageIndex++ + pastContentIndex = 0 + + // Try again with the next message + if (pastMessageIndex < pastImageMessages.length) { + const nextPastMessage = pastImageMessages[pastMessageIndex] as BaseMessage | undefined + if (nextPastMessage && Array.isArray(nextPastMessage.content) && nextPastMessage.content.length > 0) { + replacement = nextPastMessage.content[0] + pastContentIndex = 1 + } + } + } + } + } + + // Try unique images if no past image replacement found + if (!replacement && uniqueMessageIndex < uniqueImageMessages.length) { + const uniqueMessage = uniqueImageMessages[uniqueMessageIndex] as BaseMessage | undefined + if (uniqueMessage && Array.isArray(uniqueMessage.content)) { + if (uniqueContentIndex < uniqueMessage.content.length) { + replacement = uniqueMessage.content[uniqueContentIndex] + uniqueContentIndex++ + + // Move to next message if we've used all content in current one + if (uniqueContentIndex >= uniqueMessage.content.length) { + uniqueMessageIndex++ + uniqueContentIndex = 0 + } + } else { + // Current message has no more content, move to next + uniqueMessageIndex++ + uniqueContentIndex = 0 + + // Try again with the next message + if (uniqueMessageIndex < uniqueImageMessages.length) { + const nextUniqueMessage = uniqueImageMessages[uniqueMessageIndex] as BaseMessage | undefined + if ( + nextUniqueMessage && + Array.isArray(nextUniqueMessage.content) && + nextUniqueMessage.content.length > 0 + ) { + replacement = nextUniqueMessage.content[0] + uniqueContentIndex = 1 + } + } + } + } + } - if (imageMessage && Array.isArray(imageMessage.content) && imageMessage.content[j]) { - const replaceContent = imageMessage.content[j] + // Apply replacement if found + if (replacement) { message.content[j] = { - ...replaceContent + ...replacement } - imageMessagesIndex++ } } } @@ -254,7 +319,7 @@ export const getPastChatHistoryImageMessages = async ( const imageContents: MessageContentImageUrl[] = [] for (const upload of uploads) { if (upload.type === 'stored-file' && upload.mime.startsWith('image/')) { - const fileData = await getFileFromStorage(upload.name, options.chatflowid, options.chatId) + const fileData = await getFileFromStorage(upload.name, options.orgId, options.chatflowid, options.chatId) // as the image is stored in the server, read the file and convert it to base64 const bf = 'data:' + upload.mime + ';base64,' + fileData.toString('base64') diff --git a/packages/components/nodes/agents/AirtableAgent/AirtableAgent.ts b/packages/components/nodes/agents/AirtableAgent/AirtableAgent.ts index d61ffd4be38..9c7cadf0b76 100644 --- a/packages/components/nodes/agents/AirtableAgent/AirtableAgent.ts +++ b/packages/components/nodes/agents/AirtableAgent/AirtableAgent.ts @@ -128,7 +128,7 @@ class Airtable_Agents implements INode { let base64String = Buffer.from(JSON.stringify(airtableData)).toString('base64') - const loggerHandler = new ConsoleCallbackHandler(options.logger) + const loggerHandler = new ConsoleCallbackHandler(options.logger, 
options?.orgId) const callbacks = await additionalCallbacks(nodeData, options) const pyodide = await LoadPyodide() @@ -163,7 +163,7 @@ json.dumps(my_dict)` const chain = new LLMChain({ llm: model, prompt: PromptTemplate.fromTemplate(systemPrompt), - verbose: process.env.DEBUG === 'true' + verbose: process.env.DEBUG === 'true' ? true : false }) const inputs = { dict: dataframeColDict, @@ -192,7 +192,7 @@ json.dumps(my_dict)` const chain = new LLMChain({ llm: model, prompt: PromptTemplate.fromTemplate(finalSystemPrompt), - verbose: process.env.DEBUG === 'true' + verbose: process.env.DEBUG === 'true' ? true : false }) const inputs = { question: input, diff --git a/packages/components/nodes/agents/CSVAgent/CSVAgent.ts b/packages/components/nodes/agents/CSVAgent/CSVAgent.ts index fbe85afc7b9..b94d91ad13f 100644 --- a/packages/components/nodes/agents/CSVAgent/CSVAgent.ts +++ b/packages/components/nodes/agents/CSVAgent/CSVAgent.ts @@ -97,7 +97,7 @@ class CSV_Agents implements INode { } } - const loggerHandler = new ConsoleCallbackHandler(options.logger) + const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId) const shouldStreamResponse = options.shouldStreamResponse const sseStreamer: IServerSideEventStreamer = options.sseStreamer as IServerSideEventStreamer const chatId = options.chatId @@ -114,11 +114,12 @@ class CSV_Agents implements INode { } else { files = [fileName] } + const orgId = options.orgId const chatflowid = options.chatflowid for (const file of files) { if (!file) continue - const fileData = await getFileFromStorage(file, chatflowid) + const fileData = await getFileFromStorage(file, orgId, chatflowid) base64String += fileData.toString('base64') } } else { @@ -170,7 +171,7 @@ json.dumps(my_dict)` const chain = new LLMChain({ llm: model, prompt: PromptTemplate.fromTemplate(systemPrompt), - verbose: process.env.DEBUG === 'true' + verbose: process.env.DEBUG === 'true' ? true : false }) const inputs = { dict: dataframeColDict, @@ -201,7 +202,7 @@ json.dumps(my_dict)` prompt: PromptTemplate.fromTemplate( systemMessagePrompt ? `${systemMessagePrompt}\n${finalSystemPrompt}` : finalSystemPrompt ), - verbose: process.env.DEBUG === 'true' + verbose: process.env.DEBUG === 'true' ? 
true : false }) const inputs = { question: input, diff --git a/packages/components/nodes/agents/ConversationalAgent/ConversationalAgent.ts b/packages/components/nodes/agents/ConversationalAgent/ConversationalAgent.ts index 4a5d910873e..8583826da46 100644 --- a/packages/components/nodes/agents/ConversationalAgent/ConversationalAgent.ts +++ b/packages/components/nodes/agents/ConversationalAgent/ConversationalAgent.ts @@ -132,7 +132,7 @@ class ConversationalAgent_Agents implements INode { } const executor = await prepareAgent(nodeData, options, { sessionId: this.sessionId, chatId: options.chatId, input }) - const loggerHandler = new ConsoleCallbackHandler(options.logger) + const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId) const callbacks = await additionalCallbacks(nodeData, options) let res: ChainValues = {} diff --git a/packages/components/nodes/agents/ConversationalRetrievalToolAgent/ConversationalRetrievalToolAgent.ts b/packages/components/nodes/agents/ConversationalRetrievalToolAgent/ConversationalRetrievalToolAgent.ts index 54698ca1347..a850db3076c 100644 --- a/packages/components/nodes/agents/ConversationalRetrievalToolAgent/ConversationalRetrievalToolAgent.ts +++ b/packages/components/nodes/agents/ConversationalRetrievalToolAgent/ConversationalRetrievalToolAgent.ts @@ -130,7 +130,7 @@ class ConversationalRetrievalToolAgent_Agents implements INode { const executor = await prepareAgent(nodeData, options, { sessionId: this.sessionId, chatId: options.chatId, input }) - const loggerHandler = new ConsoleCallbackHandler(options.logger) + const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId) const callbacks = await additionalCallbacks(nodeData, options) let res: ChainValues = {} @@ -288,7 +288,7 @@ const prepareAgent = async ( sessionId: flowObj?.sessionId, chatId: flowObj?.chatId, input: flowObj?.input, - verbose: process.env.DEBUG === 'true', + verbose: process.env.DEBUG === 'true' ? true : false, maxIterations: maxIterations ? parseFloat(maxIterations) : undefined }) diff --git a/packages/components/nodes/agents/LlamaIndexAgents/AnthropicAgent/AnthropicAgent_LlamaIndex.ts b/packages/components/nodes/agents/LlamaIndexAgents/AnthropicAgent/AnthropicAgent_LlamaIndex.ts index c218ff65481..257250020ed 100644 --- a/packages/components/nodes/agents/LlamaIndexAgents/AnthropicAgent/AnthropicAgent_LlamaIndex.ts +++ b/packages/components/nodes/agents/LlamaIndexAgents/AnthropicAgent/AnthropicAgent_LlamaIndex.ts @@ -2,6 +2,7 @@ import { flatten } from 'lodash' import { MessageContentTextDetail, ChatMessage, AnthropicAgent, Anthropic } from 'llamaindex' import { getBaseClasses } from '../../../../src/utils' import { FlowiseMemory, ICommonObject, IMessage, INode, INodeData, INodeParams, IUsedTool } from '../../../../src/Interface' +import { EvaluationRunTracerLlama } from '../../../../evaluation/EvaluationRunTracerLlama' class AnthropicAgent_LlamaIndex_Agents implements INode { label: string @@ -96,13 +97,16 @@ class AnthropicAgent_LlamaIndex_Agents implements INode { tools, llm: model, chatHistory: chatHistory, - verbose: process.env.DEBUG === 'true' + verbose: process.env.DEBUG === 'true' ? 
true : false }) + // these are needed for evaluation runs + await EvaluationRunTracerLlama.injectEvaluationMetadata(nodeData, options, agent) + let text = '' const usedTools: IUsedTool[] = [] - const response = await agent.chat({ message: input, chatHistory, verbose: process.env.DEBUG === 'true' }) + const response = await agent.chat({ message: input, chatHistory, verbose: process.env.DEBUG === 'true' ? true : false }) if (response.sources.length) { for (const sourceTool of response.sources) { diff --git a/packages/components/nodes/agents/LlamaIndexAgents/OpenAIToolAgent/OpenAIToolAgent_LlamaIndex.ts b/packages/components/nodes/agents/LlamaIndexAgents/OpenAIToolAgent/OpenAIToolAgent_LlamaIndex.ts index 07b2578bd87..657fed6bfeb 100644 --- a/packages/components/nodes/agents/LlamaIndexAgents/OpenAIToolAgent/OpenAIToolAgent_LlamaIndex.ts +++ b/packages/components/nodes/agents/LlamaIndexAgents/OpenAIToolAgent/OpenAIToolAgent_LlamaIndex.ts @@ -1,6 +1,7 @@ import { flatten } from 'lodash' import { ChatMessage, OpenAI, OpenAIAgent } from 'llamaindex' import { getBaseClasses } from '../../../../src/utils' +import { EvaluationRunTracerLlama } from '../../../../evaluation/EvaluationRunTracerLlama' import { FlowiseMemory, ICommonObject, @@ -107,9 +108,12 @@ class OpenAIFunctionAgent_LlamaIndex_Agents implements INode { tools, llm: model, chatHistory: chatHistory, - verbose: process.env.DEBUG === 'true' + verbose: process.env.DEBUG === 'true' ? true : false }) + // these are needed for evaluation runs + await EvaluationRunTracerLlama.injectEvaluationMetadata(nodeData, options, agent) + let text = '' let isStreamingStarted = false const usedTools: IUsedTool[] = [] @@ -119,10 +123,9 @@ class OpenAIFunctionAgent_LlamaIndex_Agents implements INode { message: input, chatHistory, stream: true, - verbose: process.env.DEBUG === 'true' + verbose: process.env.DEBUG === 'true' ? true : false }) for await (const chunk of stream) { - //console.log('chunk', chunk) text += chunk.response.delta if (!isStreamingStarted) { isStreamingStarted = true @@ -147,7 +150,7 @@ class OpenAIFunctionAgent_LlamaIndex_Agents implements INode { } } } else { - const response = await agent.chat({ message: input, chatHistory, verbose: process.env.DEBUG === 'true' }) + const response = await agent.chat({ message: input, chatHistory, verbose: process.env.DEBUG === 'true' ? 
true : false }) if (response.sources.length) { for (const sourceTool of response.sources) { usedTools.push({ diff --git a/packages/components/nodes/agents/OpenAIAssistant/OpenAIAssistant.ts b/packages/components/nodes/agents/OpenAIAssistant/OpenAIAssistant.ts index f8886983df7..75ca320f2d2 100644 --- a/packages/components/nodes/agents/OpenAIAssistant/OpenAIAssistant.ts +++ b/packages/components/nodes/agents/OpenAIAssistant/OpenAIAssistant.ts @@ -107,7 +107,11 @@ class OpenAIAssistant_Agents implements INode { return returnData } - const assistants = await appDataSource.getRepository(databaseEntities['Assistant']).find() + const searchOptions = options.searchOptions || {} + const assistants = await appDataSource.getRepository(databaseEntities['Assistant']).findBy({ + ...searchOptions, + type: 'OPENAI' + }) for (let i = 0; i < assistants.length; i += 1) { const assistantDetails = JSON.parse(assistants[i].details) @@ -130,13 +134,14 @@ class OpenAIAssistant_Agents implements INode { const selectedAssistantId = nodeData.inputs?.selectedAssistant as string const appDataSource = options.appDataSource as DataSource const databaseEntities = options.databaseEntities as IDatabaseEntity + const orgId = options.orgId const assistant = await appDataSource.getRepository(databaseEntities['Assistant']).findOneBy({ id: selectedAssistantId }) if (!assistant) { - options.logger.error(`Assistant ${selectedAssistantId} not found`) + options.logger.error(`[${orgId}]: Assistant ${selectedAssistantId} not found`) return } @@ -149,7 +154,7 @@ class OpenAIAssistant_Agents implements INode { chatId }) if (!chatmsg) { - options.logger.error(`Chat Message with Chat Id: ${chatId} not found`) + options.logger.error(`[${orgId}]: Chat Message with Chat Id: ${chatId} not found`) return } sessionId = chatmsg.sessionId @@ -160,21 +165,21 @@ class OpenAIAssistant_Agents implements INode { const credentialData = await getCredentialData(assistant.credential ?? '', options) const openAIApiKey = getCredentialParam('openAIApiKey', credentialData, nodeData) if (!openAIApiKey) { - options.logger.error(`OpenAI ApiKey not found`) + options.logger.error(`[${orgId}]: OpenAI ApiKey not found`) return } const openai = new OpenAI({ apiKey: openAIApiKey }) - options.logger.info(`Clearing OpenAI Thread ${sessionId}`) + options.logger.info(`[${orgId}]: Clearing OpenAI Thread ${sessionId}`) try { if (sessionId && sessionId.startsWith('thread_')) { await openai.beta.threads.del(sessionId) - options.logger.info(`Successfully cleared OpenAI Thread ${sessionId}`) + options.logger.info(`[${orgId}]: Successfully cleared OpenAI Thread ${sessionId}`) } else { - options.logger.error(`Error clearing OpenAI Thread ${sessionId}`) + options.logger.error(`[${orgId}]: Error clearing OpenAI Thread ${sessionId}`) } } catch (e) { - options.logger.error(`Error clearing OpenAI Thread ${sessionId}`) + options.logger.error(`[${orgId}]: Error clearing OpenAI Thread ${sessionId}`) } } @@ -190,6 +195,17 @@ class OpenAIAssistant_Agents implements INode { const shouldStreamResponse = options.shouldStreamResponse const sseStreamer: IServerSideEventStreamer = options.sseStreamer as IServerSideEventStreamer const chatId = options.chatId + const checkStorage = options.checkStorage + ? (options.checkStorage as (orgId: string, subscriptionId: string, usageCacheManager: any) => Promise) + : undefined + const updateStorageUsage = options.updateStorageUsage + ? 
(options.updateStorageUsage as ( + orgId: string, + workspaceId: string, + totalSize: number, + usageCacheManager: any + ) => Promise) + : undefined if (moderations && moderations.length > 0) { try { @@ -380,17 +396,30 @@ class OpenAIAssistant_Agents implements INode { // eslint-disable-next-line no-useless-escape const fileName = cited_file.filename.split(/[\/\\]/).pop() ?? cited_file.filename if (!disableFileDownload) { - filePath = await downloadFile( + if (checkStorage) + await checkStorage(options.orgId, options.subscriptionId, options.usageCacheManager) + + const { path, totalSize } = await downloadFile( openAIApiKey, cited_file, fileName, + options.orgId, options.chatflowid, options.chatId ) + filePath = path fileAnnotations.push({ filePath, fileName }) + + if (updateStorageUsage) + await updateStorageUsage( + options.orgId, + options.workspaceId, + totalSize, + options.usageCacheManager + ) } } else { const file_path = (annotation as OpenAI.Beta.Threads.Messages.FilePathAnnotation).file_path @@ -399,17 +428,30 @@ class OpenAIAssistant_Agents implements INode { // eslint-disable-next-line no-useless-escape const fileName = cited_file.filename.split(/[\/\\]/).pop() ?? cited_file.filename if (!disableFileDownload) { - filePath = await downloadFile( + if (checkStorage) + await checkStorage(options.orgId, options.subscriptionId, options.usageCacheManager) + + const { path, totalSize } = await downloadFile( openAIApiKey, cited_file, fileName, + options.orgId, options.chatflowid, options.chatId ) + filePath = path fileAnnotations.push({ filePath, fileName }) + + if (updateStorageUsage) + await updateStorageUsage( + options.orgId, + options.workspaceId, + totalSize, + options.usageCacheManager + ) } } } @@ -467,15 +509,21 @@ class OpenAIAssistant_Agents implements INode { const fileId = chunk.image_file.file_id const fileObj = await openai.files.retrieve(fileId) - const filePath = await downloadImg( + if (checkStorage) await checkStorage(options.orgId, options.subscriptionId, options.usageCacheManager) + + const { filePath, totalSize } = await downloadImg( openai, fileId, `${fileObj.filename}.png`, + options.orgId, options.chatflowid, options.chatId ) artifacts.push({ type: 'png', data: filePath }) + if (updateStorageUsage) + await updateStorageUsage(options.orgId, options.workspaceId, totalSize, options.usageCacheManager) + if (!isStreamingStarted) { isStreamingStarted = true if (sseStreamer) { @@ -776,7 +824,21 @@ class OpenAIAssistant_Agents implements INode { // eslint-disable-next-line no-useless-escape const fileName = cited_file.filename.split(/[\/\\]/).pop() ?? cited_file.filename if (!disableFileDownload) { - filePath = await downloadFile(openAIApiKey, cited_file, fileName, options.chatflowid, options.chatId) + if (checkStorage) await checkStorage(options.orgId, options.subscriptionId, options.usageCacheManager) + + const { path, totalSize } = await downloadFile( + openAIApiKey, + cited_file, + fileName, + options.orgId, + options.chatflowid, + options.chatId + ) + filePath = path + + if (updateStorageUsage) + await updateStorageUsage(options.orgId, options.workspaceId, totalSize, options.usageCacheManager) + fileAnnotations.push({ filePath, fileName @@ -789,13 +851,27 @@ class OpenAIAssistant_Agents implements INode { // eslint-disable-next-line no-useless-escape const fileName = cited_file.filename.split(/[\/\\]/).pop() ?? 
cited_file.filename if (!disableFileDownload) { - filePath = await downloadFile( + if (checkStorage) + await checkStorage(options.orgId, options.subscriptionId, options.usageCacheManager) + + const { path, totalSize } = await downloadFile( openAIApiKey, cited_file, fileName, + options.orgId, options.chatflowid, options.chatId ) + filePath = path + + if (updateStorageUsage) + await updateStorageUsage( + options.orgId, + options.workspaceId, + totalSize, + options.usageCacheManager + ) + fileAnnotations.push({ filePath, fileName @@ -822,7 +898,20 @@ class OpenAIAssistant_Agents implements INode { const fileId = content.image_file.file_id const fileObj = await openai.files.retrieve(fileId) - const filePath = await downloadImg(openai, fileId, `${fileObj.filename}.png`, options.chatflowid, options.chatId) + if (checkStorage) await checkStorage(options.orgId, options.subscriptionId, options.usageCacheManager) + + const { filePath, totalSize } = await downloadImg( + openai, + fileId, + `${fileObj.filename}.png`, + options.orgId, + options.chatflowid, + options.chatId + ) + + if (updateStorageUsage) + await updateStorageUsage(options.orgId, options.workspaceId, totalSize, options.usageCacheManager) + artifacts.push({ type: 'png', data: filePath }) } } @@ -847,7 +936,13 @@ class OpenAIAssistant_Agents implements INode { } } -const downloadImg = async (openai: OpenAI, fileId: string, fileName: string, ...paths: string[]) => { +const downloadImg = async ( + openai: OpenAI, + fileId: string, + fileName: string, + orgId: string, + ...paths: string[] +): Promise<{ filePath: string; totalSize: number }> => { const response = await openai.files.content(fileId) // Extract the binary data from the Response object @@ -857,12 +952,18 @@ const downloadImg = async (openai: OpenAI, fileId: string, fileName: string, ... 
const image_data_buffer = Buffer.from(image_data) const mime = 'image/png' - const res = await addSingleFileToStorage(mime, image_data_buffer, fileName, ...paths) + const { path, totalSize } = await addSingleFileToStorage(mime, image_data_buffer, fileName, orgId, ...paths) - return res + return { filePath: path, totalSize } } -const downloadFile = async (openAIApiKey: string, fileObj: any, fileName: string, ...paths: string[]) => { +const downloadFile = async ( + openAIApiKey: string, + fileObj: any, + fileName: string, + orgId: string, + ...paths: string[] +): Promise<{ path: string; totalSize: number }> => { try { const response = await fetch(`https://api.openai.com/v1/files/${fileObj.id}/content`, { method: 'GET', @@ -880,10 +981,12 @@ const downloadFile = async (openAIApiKey: string, fileObj: any, fileName: string const data_buffer = Buffer.from(data) const mime = 'application/octet-stream' - return await addSingleFileToStorage(mime, data_buffer, fileName, ...paths) + const { path, totalSize } = await addSingleFileToStorage(mime, data_buffer, fileName, orgId, ...paths) + + return { path, totalSize } } catch (error) { console.error('Error downloading or writing the file:', error) - return '' + return { path: '', totalSize: 0 } } } diff --git a/packages/components/nodes/agents/ReActAgentLLM/ReActAgentLLM.ts b/packages/components/nodes/agents/ReActAgentLLM/ReActAgentLLM.ts index 1ab071f712e..bc7b0e94580 100644 --- a/packages/components/nodes/agents/ReActAgentLLM/ReActAgentLLM.ts +++ b/packages/components/nodes/agents/ReActAgentLLM/ReActAgentLLM.ts @@ -97,7 +97,7 @@ class ReActAgentLLM_Agents implements INode { const executor = new AgentExecutor({ agent, tools, - verbose: process.env.DEBUG === 'true', + verbose: process.env.DEBUG === 'true' ? true : false, maxIterations: maxIterations ? parseFloat(maxIterations) : undefined }) diff --git a/packages/components/nodes/agents/ToolAgent/ToolAgent.ts b/packages/components/nodes/agents/ToolAgent/ToolAgent.ts index 4dce1a46052..5244f76e5ac 100644 --- a/packages/components/nodes/agents/ToolAgent/ToolAgent.ts +++ b/packages/components/nodes/agents/ToolAgent/ToolAgent.ts @@ -143,7 +143,7 @@ class ToolAgent_Agents implements INode { const executor = await prepareAgent(nodeData, options, { sessionId: this.sessionId, chatId: options.chatId, input }) - const loggerHandler = new ConsoleCallbackHandler(options.logger) + const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId) const callbacks = await additionalCallbacks(nodeData, options) // Add custom streaming handler if detailed streaming is enabled @@ -370,7 +370,7 @@ const prepareAgent = async ( sessionId: flowObj?.sessionId, chatId: flowObj?.chatId, input: flowObj?.input, - verbose: process.env.DEBUG === 'true', + verbose: process.env.DEBUG === 'true' ? true : false, maxIterations: maxIterations ? 
parseFloat(maxIterations) : undefined }) diff --git a/packages/components/nodes/agents/XMLAgent/XMLAgent.ts b/packages/components/nodes/agents/XMLAgent/XMLAgent.ts index 92f46feb560..f8377a66b79 100644 --- a/packages/components/nodes/agents/XMLAgent/XMLAgent.ts +++ b/packages/components/nodes/agents/XMLAgent/XMLAgent.ts @@ -138,7 +138,7 @@ class XMLAgent_Agents implements INode { } const executor = await prepareAgent(nodeData, options, { sessionId: this.sessionId, chatId: options.chatId, input }) - const loggerHandler = new ConsoleCallbackHandler(options.logger) + const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId) const callbacks = await additionalCallbacks(nodeData, options) let res: ChainValues = {} @@ -278,7 +278,7 @@ const prepareAgent = async ( chatId: flowObj?.chatId, input: flowObj?.input, isXML: true, - verbose: process.env.DEBUG === 'true', + verbose: process.env.DEBUG === 'true' ? true : false, maxIterations: maxIterations ? parseFloat(maxIterations) : undefined }) diff --git a/packages/components/nodes/chains/ApiChain/GETApiChain.ts b/packages/components/nodes/chains/ApiChain/GETApiChain.ts index 222970f44c5..ba9ac40a9b7 100644 --- a/packages/components/nodes/chains/ApiChain/GETApiChain.ts +++ b/packages/components/nodes/chains/ApiChain/GETApiChain.ts @@ -98,7 +98,7 @@ class GETApiChain_Chains implements INode { const ansPrompt = nodeData.inputs?.ansPrompt as string const chain = await getAPIChain(apiDocs, model, headers, urlPrompt, ansPrompt) - const loggerHandler = new ConsoleCallbackHandler(options.logger) + const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId) const callbacks = await additionalCallbacks(nodeData, options) const shouldStreamResponse = options.shouldStreamResponse const sseStreamer: IServerSideEventStreamer = options.sseStreamer as IServerSideEventStreamer @@ -129,7 +129,7 @@ const getAPIChain = async (documents: string, llm: BaseLanguageModel, headers: s const chain = APIChain.fromLLMAndAPIDocs(llm, documents, { apiUrlPrompt, apiResponsePrompt, - verbose: process.env.DEBUG === 'true', + verbose: process.env.DEBUG === 'true' ? true : false, headers: typeof headers === 'object' ? headers : headers ? 
JSON.parse(headers) : {} }) return chain diff --git a/packages/components/nodes/chains/ApiChain/OpenAPIChain.ts b/packages/components/nodes/chains/ApiChain/OpenAPIChain.ts index b5314ffe3db..91c1c149277 100644 --- a/packages/components/nodes/chains/ApiChain/OpenAPIChain.ts +++ b/packages/components/nodes/chains/ApiChain/OpenAPIChain.ts @@ -71,7 +71,7 @@ class OpenApiChain_Chains implements INode { async run(nodeData: INodeData, input: string, options: ICommonObject): Promise { const chain = await initChain(nodeData, options) - const loggerHandler = new ConsoleCallbackHandler(options.logger) + const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId) const callbacks = await additionalCallbacks(nodeData, options) const moderations = nodeData.inputs?.inputModeration as Moderation[] const shouldStreamResponse = options.shouldStreamResponse @@ -114,8 +114,9 @@ const initChain = async (nodeData: INodeData, options: ICommonObject) => { } else { if (yamlFileBase64.startsWith('FILE-STORAGE::')) { const file = yamlFileBase64.replace('FILE-STORAGE::', '') + const orgId = options.orgId const chatflowid = options.chatflowid - const fileData = await getFileFromStorage(file, chatflowid) + const fileData = await getFileFromStorage(file, orgId, chatflowid) yamlString = fileData.toString() } else { const splitDataURI = yamlFileBase64.split(',') @@ -128,7 +129,7 @@ const initChain = async (nodeData: INodeData, options: ICommonObject) => { return await createOpenAPIChain(yamlString, { llm: model, headers: typeof headers === 'object' ? headers : headers ? JSON.parse(headers) : {}, - verbose: process.env.DEBUG === 'true' + verbose: process.env.DEBUG === 'true' ? true : false }) } diff --git a/packages/components/nodes/chains/ApiChain/POSTApiChain.ts b/packages/components/nodes/chains/ApiChain/POSTApiChain.ts index da033d2d24f..20e5447bb28 100644 --- a/packages/components/nodes/chains/ApiChain/POSTApiChain.ts +++ b/packages/components/nodes/chains/ApiChain/POSTApiChain.ts @@ -87,7 +87,7 @@ class POSTApiChain_Chains implements INode { const ansPrompt = nodeData.inputs?.ansPrompt as string const chain = await getAPIChain(apiDocs, model, headers, urlPrompt, ansPrompt) - const loggerHandler = new ConsoleCallbackHandler(options.logger) + const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId) const callbacks = await additionalCallbacks(nodeData, options) const shouldStreamResponse = options.shouldStreamResponse @@ -119,7 +119,7 @@ const getAPIChain = async (documents: string, llm: BaseLanguageModel, headers: s const chain = APIChain.fromLLMAndAPIDocs(llm, documents, { apiUrlPrompt, apiResponsePrompt, - verbose: process.env.DEBUG === 'true', + verbose: process.env.DEBUG === 'true' ? true : false, headers: typeof headers === 'object' ? headers : headers ? 
JSON.parse(headers) : {} }) return chain diff --git a/packages/components/nodes/chains/ConversationChain/ConversationChain.ts b/packages/components/nodes/chains/ConversationChain/ConversationChain.ts index f0d3de7aa55..04e36daf31a 100644 --- a/packages/components/nodes/chains/ConversationChain/ConversationChain.ts +++ b/packages/components/nodes/chains/ConversationChain/ConversationChain.ts @@ -132,7 +132,7 @@ class ConversationChain_Chains implements INode { } } - const loggerHandler = new ConsoleCallbackHandler(options.logger) + const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId) const additionalCallback = await additionalCallbacks(nodeData, options) let res = '' diff --git a/packages/components/nodes/chains/ConversationalRetrievalQAChain/ConversationalRetrievalQAChain.ts b/packages/components/nodes/chains/ConversationalRetrievalQAChain/ConversationalRetrievalQAChain.ts index 29528ae5c69..31dfa8b1a90 100644 --- a/packages/components/nodes/chains/ConversationalRetrievalQAChain/ConversationalRetrievalQAChain.ts +++ b/packages/components/nodes/chains/ConversationalRetrievalQAChain/ConversationalRetrievalQAChain.ts @@ -185,6 +185,7 @@ class ConversationalRetrievalQAChain_Chains implements INode { const shouldStreamResponse = options.shouldStreamResponse const sseStreamer: IServerSideEventStreamer = options.sseStreamer as IServerSideEventStreamer const chatId = options.chatId + const orgId = options.orgId let customResponsePrompt = responsePrompt // If the deprecated systemMessagePrompt is still exists @@ -200,7 +201,8 @@ class ConversationalRetrievalQAChain_Chains implements INode { memoryKey: 'chat_history', appDataSource, databaseEntities, - chatflowid + chatflowid, + orgId }) } @@ -220,7 +222,7 @@ class ConversationalRetrievalQAChain_Chains implements INode { const history = ((await memory.getChatMessages(this.sessionId, false, prependMessages)) as IMessage[]) ?? 
[] - const loggerHandler = new ConsoleCallbackHandler(options.logger) + const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId) const additionalCallback = await additionalCallbacks(nodeData, options) let callbacks = [loggerHandler, ...additionalCallback] @@ -407,18 +409,21 @@ interface BufferMemoryExtendedInput { appDataSource: DataSource databaseEntities: IDatabaseEntity chatflowid: string + orgId: string } class BufferMemory extends FlowiseMemory implements MemoryMethods { appDataSource: DataSource databaseEntities: IDatabaseEntity chatflowid: string + orgId: string constructor(fields: BufferMemoryInput & BufferMemoryExtendedInput) { super(fields) this.appDataSource = fields.appDataSource this.databaseEntities = fields.databaseEntities this.chatflowid = fields.chatflowid + this.orgId = fields.orgId } async getChatMessages( @@ -443,7 +448,7 @@ class BufferMemory extends FlowiseMemory implements MemoryMethods { } if (returnBaseMessages) { - return await mapChatMessageToBaseMessage(chatMessage) + return await mapChatMessageToBaseMessage(chatMessage, this.orgId) } let returnIMessages: IMessage[] = [] diff --git a/packages/components/nodes/chains/GraphCypherQAChain/GraphCypherQAChain.ts b/packages/components/nodes/chains/GraphCypherQAChain/GraphCypherQAChain.ts index fb7dc4a7d86..5a2f16c093f 100644 --- a/packages/components/nodes/chains/GraphCypherQAChain/GraphCypherQAChain.ts +++ b/packages/components/nodes/chains/GraphCypherQAChain/GraphCypherQAChain.ts @@ -215,7 +215,7 @@ class GraphCypherQA_Chain implements INode { query: input } - const loggerHandler = new ConsoleCallbackHandler(options.logger) + const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId) const callbackHandlers = await additionalCallbacks(nodeData, options) let callbacks = [loggerHandler, ...callbackHandlers] diff --git a/packages/components/nodes/chains/LLMChain/LLMChain.ts b/packages/components/nodes/chains/LLMChain/LLMChain.ts index f72603635e6..8013581264a 100644 --- a/packages/components/nodes/chains/LLMChain/LLMChain.ts +++ b/packages/components/nodes/chains/LLMChain/LLMChain.ts @@ -167,7 +167,7 @@ const runPrediction = async ( nodeData: INodeData, disableStreaming?: boolean ) => { - const loggerHandler = new ConsoleCallbackHandler(options.logger) + const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId) const callbacks = await additionalCallbacks(nodeData, options) const moderations = nodeData.inputs?.inputModeration as Moderation[] diff --git a/packages/components/nodes/chains/MultiPromptChain/MultiPromptChain.ts b/packages/components/nodes/chains/MultiPromptChain/MultiPromptChain.ts index 7863981c25e..a2fae0db91a 100644 --- a/packages/components/nodes/chains/MultiPromptChain/MultiPromptChain.ts +++ b/packages/components/nodes/chains/MultiPromptChain/MultiPromptChain.ts @@ -66,7 +66,7 @@ class MultiPromptChain_Chains implements INode { promptNames, promptDescriptions, promptTemplates, - llmChainOpts: { verbose: process.env.DEBUG === 'true' } + llmChainOpts: { verbose: process.env.DEBUG === 'true' ? 
true : false } }) return chain @@ -95,7 +95,7 @@ class MultiPromptChain_Chains implements INode { } const obj = { input } - const loggerHandler = new ConsoleCallbackHandler(options.logger) + const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId) const callbacks = await additionalCallbacks(nodeData, options) if (shouldStreamResponse) { diff --git a/packages/components/nodes/chains/MultiRetrievalQAChain/MultiRetrievalQAChain.ts b/packages/components/nodes/chains/MultiRetrievalQAChain/MultiRetrievalQAChain.ts index eed73f4cc1e..b0c90bad105 100644 --- a/packages/components/nodes/chains/MultiRetrievalQAChain/MultiRetrievalQAChain.ts +++ b/packages/components/nodes/chains/MultiRetrievalQAChain/MultiRetrievalQAChain.ts @@ -74,7 +74,7 @@ class MultiRetrievalQAChain_Chains implements INode { retrieverNames, retrieverDescriptions, retrievers, - retrievalQAChainOpts: { verbose: process.env.DEBUG === 'true', returnSourceDocuments } + retrievalQAChainOpts: { verbose: process.env.DEBUG === 'true' ? true : false, returnSourceDocuments } }) return chain } @@ -101,7 +101,7 @@ class MultiRetrievalQAChain_Chains implements INode { } } const obj = { input } - const loggerHandler = new ConsoleCallbackHandler(options.logger) + const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId) const callbacks = await additionalCallbacks(nodeData, options) if (shouldStreamResponse) { diff --git a/packages/components/nodes/chains/RetrievalQAChain/RetrievalQAChain.ts b/packages/components/nodes/chains/RetrievalQAChain/RetrievalQAChain.ts index 8e7453d7524..ae023862f75 100644 --- a/packages/components/nodes/chains/RetrievalQAChain/RetrievalQAChain.ts +++ b/packages/components/nodes/chains/RetrievalQAChain/RetrievalQAChain.ts @@ -53,7 +53,7 @@ class RetrievalQAChain_Chains implements INode { const model = nodeData.inputs?.model as BaseLanguageModel const vectorStoreRetriever = nodeData.inputs?.vectorStoreRetriever as BaseRetriever - const chain = RetrievalQAChain.fromLLM(model, vectorStoreRetriever, { verbose: process.env.DEBUG === 'true' }) + const chain = RetrievalQAChain.fromLLM(model, vectorStoreRetriever, { verbose: process.env.DEBUG === 'true' ? true : false }) return chain } @@ -80,7 +80,7 @@ class RetrievalQAChain_Chains implements INode { const obj = { query: input } - const loggerHandler = new ConsoleCallbackHandler(options.logger) + const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId) const callbacks = await additionalCallbacks(nodeData, options) if (shouldStreamResponse) { diff --git a/packages/components/nodes/chains/SqlDatabaseChain/SqlDatabaseChain.ts b/packages/components/nodes/chains/SqlDatabaseChain/SqlDatabaseChain.ts index cc062fb768d..539e2031d72 100644 --- a/packages/components/nodes/chains/SqlDatabaseChain/SqlDatabaseChain.ts +++ b/packages/components/nodes/chains/SqlDatabaseChain/SqlDatabaseChain.ts @@ -194,7 +194,7 @@ class SqlDatabaseChain_Chains implements INode { topK, customPrompt ) - const loggerHandler = new ConsoleCallbackHandler(options.logger) + const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId) const callbacks = await additionalCallbacks(nodeData, options) if (shouldStreamResponse) { @@ -241,7 +241,7 @@ const getSQLDBChain = async ( const obj: SqlDatabaseChainInput = { llm, database: db, - verbose: process.env.DEBUG === 'true', + verbose: process.env.DEBUG === 'true' ? 
true : false, topK: topK } diff --git a/packages/components/nodes/chains/VectorDBQAChain/VectorDBQAChain.ts b/packages/components/nodes/chains/VectorDBQAChain/VectorDBQAChain.ts index ec1b2cf8bb5..2fd36919ba0 100644 --- a/packages/components/nodes/chains/VectorDBQAChain/VectorDBQAChain.ts +++ b/packages/components/nodes/chains/VectorDBQAChain/VectorDBQAChain.ts @@ -55,7 +55,7 @@ class VectorDBQAChain_Chains implements INode { const chain = VectorDBQAChain.fromLLM(model, vectorStore, { k: (vectorStore as any)?.k ?? 4, - verbose: process.env.DEBUG === 'true' + verbose: process.env.DEBUG === 'true' ? true : false }) return chain } @@ -84,7 +84,7 @@ class VectorDBQAChain_Chains implements INode { query: input } - const loggerHandler = new ConsoleCallbackHandler(options.logger) + const loggerHandler = new ConsoleCallbackHandler(options.logger, options?.orgId) const callbacks = await additionalCallbacks(nodeData, options) if (shouldStreamResponse) { diff --git a/packages/components/nodes/chatmodels/AzureChatOpenAI/README.md b/packages/components/nodes/chatmodels/AzureChatOpenAI/README.md index f12f42dc198..3bfd3396457 100644 --- a/packages/components/nodes/chatmodels/AzureChatOpenAI/README.md +++ b/packages/components/nodes/chatmodels/AzureChatOpenAI/README.md @@ -4,13 +4,13 @@ Azure OpenAI Chat Model integration for Flowise ## 🌱 Env Variables -| Variable | Description | Type | Default | -| ---------------------------- | ----------------------------------------------------------------------------------------------- | ------------------------------------------------ | ----------------------------------- | -| AZURE_OPENAI_API_KEY | Default `credential.azureOpenAIApiKey` for Azure OpenAI Model | String | | -| AZURE_OPENAI_API_INSTANCE_NAME | Default `credential.azureOpenAIApiInstanceName` for Azure OpenAI Model | String | | -| AZURE_OPENAI_API_DEPLOYMENT_NAME | Default `credential.azureOpenAIApiDeploymentName` for Azure OpenAI Model | String | | -| AZURE_OPENAI_API_VERSION | Default `credential.azureOpenAIApiVersion` for Azure OpenAI Model | String | | +| Variable | Description | Type | Default | +| -------------------------------- | ------------------------------------------------------------------------ | ------ | ------- | +| AZURE_OPENAI_API_KEY | Default `credential.azureOpenAIApiKey` for Azure OpenAI Model | String | | +| AZURE_OPENAI_API_INSTANCE_NAME | Default `credential.azureOpenAIApiInstanceName` for Azure OpenAI Model | String | | +| AZURE_OPENAI_API_DEPLOYMENT_NAME | Default `credential.azureOpenAIApiDeploymentName` for Azure OpenAI Model | String | | +| AZURE_OPENAI_API_VERSION | Default `credential.azureOpenAIApiVersion` for Azure OpenAI Model | String | | ## License -Source code in this repository is made available under the [Apache License Version 2.0](https://github.com/FlowiseAI/Flowise/blob/master/LICENSE.md). \ No newline at end of file +Source code in this repository is made available under the [Apache License Version 2.0](https://github.com/FlowiseAI/Flowise/blob/master/LICENSE.md). 
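The README table above documents fallback values only. A minimal sketch of how a node could resolve those Azure fields, assuming the getCredentialData/getCredentialParam helpers and the '../../../src/utils' import path used elsewhere in this diff; the resolution order and variable names are illustrative assumptions, not the node's actual implementation:

import { getCredentialData, getCredentialParam } from '../../../src/utils'

// Illustrative only: prefer the stored credential value, otherwise fall back
// to the documented environment variable. Assumed to run inside a node's async init().
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
const azureOpenAIApiKey = getCredentialParam('azureOpenAIApiKey', credentialData, nodeData) || process.env.AZURE_OPENAI_API_KEY
const azureOpenAIApiInstanceName = getCredentialParam('azureOpenAIApiInstanceName', credentialData, nodeData) || process.env.AZURE_OPENAI_API_INSTANCE_NAME
const azureOpenAIApiDeploymentName = getCredentialParam('azureOpenAIApiDeploymentName', credentialData, nodeData) || process.env.AZURE_OPENAI_API_DEPLOYMENT_NAME
const azureOpenAIApiVersion = getCredentialParam('azureOpenAIApiVersion', credentialData, nodeData) || process.env.AZURE_OPENAI_API_VERSION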
diff --git a/packages/components/nodes/chatmodels/ChatGoogleGenerativeAI/ChatGoogleGenerativeAI.ts b/packages/components/nodes/chatmodels/ChatGoogleGenerativeAI/ChatGoogleGenerativeAI.ts index 9d15abba6a9..b42ab4077c7 100644 --- a/packages/components/nodes/chatmodels/ChatGoogleGenerativeAI/ChatGoogleGenerativeAI.ts +++ b/packages/components/nodes/chatmodels/ChatGoogleGenerativeAI/ChatGoogleGenerativeAI.ts @@ -216,6 +216,10 @@ class GoogleGenerativeAI_ChatModels implements INode { streaming: streaming ?? true } + // this extra metadata is needed, as langchain does not show the model name in the callbacks. + obj.metadata = { + fw_model_name: customModelName || modelName + } if (maxOutputTokens) obj.maxOutputTokens = parseInt(maxOutputTokens, 10) if (topP) obj.topP = parseFloat(topP) if (topK) obj.topK = parseFloat(topK) diff --git a/packages/components/nodes/chatmodels/ChatGoogleGenerativeAI/FlowiseChatGoogleGenerativeAI.ts b/packages/components/nodes/chatmodels/ChatGoogleGenerativeAI/FlowiseChatGoogleGenerativeAI.ts index 4824810ebde..8854485c91c 100644 --- a/packages/components/nodes/chatmodels/ChatGoogleGenerativeAI/FlowiseChatGoogleGenerativeAI.ts +++ b/packages/components/nodes/chatmodels/ChatGoogleGenerativeAI/FlowiseChatGoogleGenerativeAI.ts @@ -92,7 +92,7 @@ class LangchainChatGoogleGenerativeAI private contextCache?: FlowiseGoogleAICacheManager get _isMultimodalModel() { - return this.modelName.includes('vision') || this.modelName.startsWith('gemini-1.5') + return true } constructor(fields?: GoogleGenerativeAIChatInput) { diff --git a/packages/components/nodes/chatmodels/ChatGoogleVertexAI/ChatGoogleVertexAI.ts b/packages/components/nodes/chatmodels/ChatGoogleVertexAI/ChatGoogleVertexAI.ts index 5b813984bcb..44fed0b6a3a 100644 --- a/packages/components/nodes/chatmodels/ChatGoogleVertexAI/ChatGoogleVertexAI.ts +++ b/packages/components/nodes/chatmodels/ChatGoogleVertexAI/ChatGoogleVertexAI.ts @@ -99,7 +99,8 @@ class GoogleVertexAI_ChatModels implements INode { type: 'string', placeholder: 'gemini-1.5-pro-exp-0801', description: 'Custom model name to use. If provided, it will override the model selected', - additionalParams: true + additionalParams: true, + optional: true }, { label: 'Temperature', diff --git a/packages/components/nodes/chatmodels/ChatIBMWatsonx/ChatIBMWatsonx.ts b/packages/components/nodes/chatmodels/ChatIBMWatsonx/ChatIBMWatsonx.ts index f4655ace6cd..00adc75fb05 100644 --- a/packages/components/nodes/chatmodels/ChatIBMWatsonx/ChatIBMWatsonx.ts +++ b/packages/components/nodes/chatmodels/ChatIBMWatsonx/ChatIBMWatsonx.ts @@ -161,12 +161,13 @@ class ChatIBMWatsonx_ChatModels implements INode { watsonxAIBearerToken } - const obj: ChatWatsonxInput & WatsonxAuth = { + const obj = { ...auth, streaming: streaming ?? true, model: modelName, temperature: temperature ? 
parseFloat(temperature) : undefined - } + } as ChatWatsonxInput & WatsonxAuth + if (cache) obj.cache = cache if (maxTokens) obj.maxTokens = parseInt(maxTokens, 10) if (frequencyPenalty) obj.frequencyPenalty = parseInt(frequencyPenalty, 10) diff --git a/packages/components/nodes/documentloaders/Cheerio/Cheerio.ts b/packages/components/nodes/documentloaders/Cheerio/Cheerio.ts index bf5bf3387c3..9ef16850c02 100644 --- a/packages/components/nodes/documentloaders/Cheerio/Cheerio.ts +++ b/packages/components/nodes/documentloaders/Cheerio/Cheerio.ts @@ -123,6 +123,7 @@ class Cheerio_DocumentLoaders implements INode { const selectedLinks = nodeData.inputs?.selectedLinks as string[] let limit = parseInt(nodeData.inputs?.limit as string) const output = nodeData.outputs?.output as string + const orgId = options.orgId const _omitMetadataKeys = nodeData.inputs?.omitMetadataKeys as string @@ -149,7 +150,8 @@ class Cheerio_DocumentLoaders implements INode { try { let docs: IDocument[] = [] if (url.endsWith('.pdf')) { - if (process.env.DEBUG === 'true') options.logger.info(`CheerioWebBaseLoader does not support PDF files: ${url}`) + if (process.env.DEBUG === 'true') + options.logger.info(`[${orgId}]: CheerioWebBaseLoader does not support PDF files: ${url}`) return docs } const loader = new CheerioWebBaseLoader(url, params) @@ -161,7 +163,8 @@ class Cheerio_DocumentLoaders implements INode { } return docs } catch (err) { - if (process.env.DEBUG === 'true') options.logger.error(`error in CheerioWebBaseLoader: ${err.message}, on page: ${url}`) + if (process.env.DEBUG === 'true') + options.logger.error(`[${orgId}]: Error in CheerioWebBaseLoader: ${err.message}, on page: ${url}`) return [] } } @@ -169,7 +172,7 @@ class Cheerio_DocumentLoaders implements INode { let docs: IDocument[] = [] if (relativeLinksMethod) { - if (process.env.DEBUG === 'true') options.logger.info(`Start ${relativeLinksMethod}`) + if (process.env.DEBUG === 'true') options.logger.info(`[${orgId}]: Start CheerioWebBaseLoader ${relativeLinksMethod}`) // if limit is 0 we don't want it to default to 10 so we check explicitly for null or undefined // so when limit is 0 we can fetch all the links if (limit === null || limit === undefined) limit = 10 @@ -180,15 +183,18 @@ class Cheerio_DocumentLoaders implements INode { : relativeLinksMethod === 'webCrawl' ? 
await webCrawl(url, limit) : await xmlScrape(url, limit) - if (process.env.DEBUG === 'true') options.logger.info(`pages: ${JSON.stringify(pages)}, length: ${pages.length}`) + if (process.env.DEBUG === 'true') + options.logger.info(`[${orgId}]: CheerioWebBaseLoader pages: ${JSON.stringify(pages)}, length: ${pages.length}`) if (!pages || pages.length === 0) throw new Error('No relative links found') for (const page of pages) { docs.push(...(await cheerioLoader(page))) } - if (process.env.DEBUG === 'true') options.logger.info(`Finish ${relativeLinksMethod}`) + if (process.env.DEBUG === 'true') options.logger.info(`[${orgId}]: Finish CheerioWebBaseLoader ${relativeLinksMethod}`) } else if (selectedLinks && selectedLinks.length > 0) { if (process.env.DEBUG === 'true') - options.logger.info(`pages: ${JSON.stringify(selectedLinks)}, length: ${selectedLinks.length}`) + options.logger.info( + `[${orgId}]: CheerioWebBaseLoader pages: ${JSON.stringify(selectedLinks)}, length: ${selectedLinks.length}` + ) for (const page of selectedLinks.slice(0, limit)) { docs.push(...(await cheerioLoader(page))) } diff --git a/packages/components/nodes/documentloaders/Csv/Csv.ts b/packages/components/nodes/documentloaders/Csv/Csv.ts index 10186cf23a2..9e251a3e8ed 100644 --- a/packages/components/nodes/documentloaders/Csv/Csv.ts +++ b/packages/components/nodes/documentloaders/Csv/Csv.ts @@ -107,9 +107,9 @@ class Csv_DocumentLoaders implements INode { return { files, fromStorage } } - async getFileData(file: string, { chatflowid }: { chatflowid: string }, fromStorage?: boolean) { + async getFileData(file: string, { orgId, chatflowid }: { orgId: string; chatflowid: string }, fromStorage?: boolean) { if (fromStorage) { - return getFileFromStorage(file, chatflowid) + return getFileFromStorage(file, orgId, chatflowid) } else { const splitDataURI = file.split(',') splitDataURI.pop() @@ -126,6 +126,7 @@ class Csv_DocumentLoaders implements INode { let docs: IDocument[] = [] + const orgId = options.orgId const chatflowid = options.chatflowid const { files, fromStorage } = this.getFiles(nodeData) @@ -133,7 +134,7 @@ class Csv_DocumentLoaders implements INode { for (const file of files) { if (!file) continue - const fileData = await this.getFileData(file, { chatflowid }, fromStorage) + const fileData = await this.getFileData(file, { orgId, chatflowid }, fromStorage) const blob = new Blob([fileData]) const loader = new CSVLoader(blob, columnName.trim().length === 0 ? 
undefined : columnName.trim()) diff --git a/packages/components/nodes/documentloaders/CustomDocumentLoader/CustomDocumentLoader.ts b/packages/components/nodes/documentloaders/CustomDocumentLoader/CustomDocumentLoader.ts index ed19a01e731..18f58c25d22 100644 --- a/packages/components/nodes/documentloaders/CustomDocumentLoader/CustomDocumentLoader.ts +++ b/packages/components/nodes/documentloaders/CustomDocumentLoader/CustomDocumentLoader.ts @@ -72,7 +72,7 @@ class CustomDocumentLoader_DocumentLoaders implements INode { const appDataSource = options.appDataSource as DataSource const databaseEntities = options.databaseEntities as IDatabaseEntity - const variables = await getVars(appDataSource, databaseEntities, nodeData) + const variables = await getVars(appDataSource, databaseEntities, nodeData, options) const flow = { chatflowId: options.chatflowid, sessionId: options.sessionId, diff --git a/packages/components/nodes/documentloaders/DocumentStore/DocStoreLoader.ts b/packages/components/nodes/documentloaders/DocumentStore/DocStoreLoader.ts index be66e90ad88..e39b07ca1dc 100644 --- a/packages/components/nodes/documentloaders/DocumentStore/DocStoreLoader.ts +++ b/packages/components/nodes/documentloaders/DocumentStore/DocStoreLoader.ts @@ -60,7 +60,8 @@ class DocStore_DocumentLoaders implements INode { return returnData } - const stores = await appDataSource.getRepository(databaseEntities['DocumentStore']).find() + const searchOptions = options.searchOptions || {} + const stores = await appDataSource.getRepository(databaseEntities['DocumentStore']).findBy(searchOptions) for (const store of stores) { if (store.status === 'SYNC') { const obj = { diff --git a/packages/components/nodes/documentloaders/Docx/Docx.ts b/packages/components/nodes/documentloaders/Docx/Docx.ts index 70ab2c5c747..d59a1e2b36a 100644 --- a/packages/components/nodes/documentloaders/Docx/Docx.ts +++ b/packages/components/nodes/documentloaders/Docx/Docx.ts @@ -96,11 +96,12 @@ class Docx_DocumentLoaders implements INode { } else { files = [fileName] } + const orgId = options.orgId const chatflowid = options.chatflowid for (const file of files) { if (!file) continue - const fileData = await getFileFromStorage(file, chatflowid) + const fileData = await getFileFromStorage(file, orgId, chatflowid) const blob = new Blob([fileData]) const loader = new DocxLoader(blob) diff --git a/packages/components/nodes/documentloaders/Epub/Epub.ts b/packages/components/nodes/documentloaders/Epub/Epub.ts index 088d01a4e19..440cfe009bf 100644 --- a/packages/components/nodes/documentloaders/Epub/Epub.ts +++ b/packages/components/nodes/documentloaders/Epub/Epub.ts @@ -118,10 +118,11 @@ class Epub_DocumentLoaders implements INode { files = fileName.startsWith('[') && fileName.endsWith(']') ? 
JSON.parse(fileName) : [fileName] const chatflowid = options.chatflowid + const orgId = options.orgId for (const file of files) { if (!file) continue - const fileData = await getFileFromStorage(file, chatflowid) + const fileData = await getFileFromStorage(file, orgId, chatflowid) const tempFilePath = path.join(tempDir, `${Date.now()}_${file}`) fs.writeFileSync(tempFilePath, fileData) await this.extractDocs(usage, tempFilePath, textSplitter, docs) diff --git a/packages/components/nodes/documentloaders/File/File.ts b/packages/components/nodes/documentloaders/File/File.ts index 68ec54df0db..345a4ccc571 100644 --- a/packages/components/nodes/documentloaders/File/File.ts +++ b/packages/components/nodes/documentloaders/File/File.ts @@ -144,6 +144,7 @@ class File_DocumentLoaders implements INode { } else { files = [fileName] } + const orgId = options.orgId const chatflowid = options.chatflowid // specific to createAttachment to get files from chatId @@ -151,14 +152,14 @@ class File_DocumentLoaders implements INode { if (retrieveAttachmentChatId) { for (const file of files) { if (!file) continue - const fileData = await getFileFromStorage(file, chatflowid, options.chatId) + const fileData = await getFileFromStorage(file, orgId, chatflowid, options.chatId) const blob = new Blob([fileData]) fileBlobs.push({ blob, ext: file.split('.').pop() || '' }) } } else { for (const file of files) { if (!file) continue - const fileData = await getFileFromStorage(file, chatflowid) + const fileData = await getFileFromStorage(file, orgId, chatflowid) const blob = new Blob([fileData]) fileBlobs.push({ blob, ext: file.split('.').pop() || '' }) } diff --git a/packages/components/nodes/documentloaders/FireCrawl/FireCrawl.ts b/packages/components/nodes/documentloaders/FireCrawl/FireCrawl.ts index a2707d13b07..2c1778f422b 100644 --- a/packages/components/nodes/documentloaders/FireCrawl/FireCrawl.ts +++ b/packages/components/nodes/documentloaders/FireCrawl/FireCrawl.ts @@ -5,7 +5,6 @@ import { INode, INodeData, INodeParams, ICommonObject, INodeOutputsValue } from import { getCredentialData, getCredentialParam, handleEscapeCharacters } from '../../../src/utils' import axios, { AxiosResponse, AxiosRequestHeaders } from 'axios' import { z } from 'zod' -import { zodToJsonSchema } from 'zod-to-json-schema' // FirecrawlApp interfaces interface FirecrawlAppConfig { @@ -17,25 +16,24 @@ interface FirecrawlDocumentMetadata { title?: string description?: string language?: string - // ... 
(other metadata fields) + sourceURL?: string + statusCode?: number + error?: string [key: string]: any } interface FirecrawlDocument { - id?: string - url?: string - content: string markdown?: string html?: string - llm_extraction?: Record - createdAt?: Date - updatedAt?: Date - type?: string + rawHtml?: string + screenshot?: string + links?: string[] + actions?: { + screenshots?: string[] + } metadata: FirecrawlDocumentMetadata - childrenLinks?: string[] - provider?: string + llm_extraction?: Record warning?: string - index?: number } interface ScrapeResponse { @@ -46,9 +44,27 @@ interface ScrapeResponse { interface CrawlResponse { success: boolean - jobId?: string - data?: FirecrawlDocument[] + id: string + url: string error?: string + data?: FirecrawlDocument +} + +interface CrawlStatusResponse { + status: string + total: number + completed: number + creditsUsed: number + expiresAt: string + next?: string + data?: FirecrawlDocument[] +} + +interface ExtractResponse { + success: boolean + id: string + url: string + data?: Record } interface Params { @@ -60,6 +76,36 @@ interface Params { } } +interface ExtractRequest { + urls: string[] + prompt?: string + schema?: Record + enableWebSearch?: boolean + ignoreSitemap?: boolean + includeSubdomains?: boolean + showSources?: boolean + scrapeOptions?: { + formats?: string[] + onlyMainContent?: boolean + includeTags?: string | string[] + excludeTags?: string | string[] + mobile?: boolean + skipTlsVerification?: boolean + timeout?: number + jsonOptions?: { + schema?: Record + prompt?: string + } + } +} + +interface ExtractStatusResponse { + success: boolean + data: any + status: 'completed' | 'pending' | 'processing' | 'failed' | 'cancelled' + expiresAt: string +} + // FirecrawlApp class (not exported) class FirecrawlApp { private apiKey: string @@ -75,23 +121,47 @@ class FirecrawlApp { async scrapeUrl(url: string, params: Params | null = null): Promise { const headers = this.prepareHeaders() - let jsonData: Params = { url, ...params } - if (params?.extractorOptions?.extractionSchema) { - let schema = params.extractorOptions.extractionSchema - if (schema instanceof z.ZodSchema) { - schema = zodToJsonSchema(schema) + + // Create a clean payload with only valid parameters + const validParams: any = { + url, + formats: ['markdown'], + onlyMainContent: true + } + + // Add optional parameters if they exist + if (params?.scrapeOptions) { + if (params.scrapeOptions.includeTags) { + validParams.includeTags = Array.isArray(params.scrapeOptions.includeTags) + ? params.scrapeOptions.includeTags + : params.scrapeOptions.includeTags.split(',') } - jsonData = { - ...jsonData, - extractorOptions: { - ...params.extractorOptions, - extractionSchema: schema, - mode: params.extractorOptions.mode || 'llm-extraction' - } + if (params.scrapeOptions.excludeTags) { + validParams.excludeTags = Array.isArray(params.scrapeOptions.excludeTags) + ? 
params.scrapeOptions.excludeTags + : params.scrapeOptions.excludeTags.split(',') + } + if (params.scrapeOptions.mobile !== undefined) { + validParams.mobile = params.scrapeOptions.mobile + } + if (params.scrapeOptions.skipTlsVerification !== undefined) { + validParams.skipTlsVerification = params.scrapeOptions.skipTlsVerification + } + if (params.scrapeOptions.timeout) { + validParams.timeout = params.scrapeOptions.timeout + } + } + + // Add JSON options if they exist + if (params?.extractorOptions) { + validParams.jsonOptions = { + schema: params.extractorOptions.extractionSchema, + prompt: params.extractorOptions.extractionPrompt } } + try { - const response: AxiosResponse = await this.postRequest(this.apiUrl + '/v0/scrape', jsonData, headers) + const response: AxiosResponse = await this.postRequest(this.apiUrl + '/v1/scrape', validParams, headers) if (response.status === 200) { const responseData = response.data if (responseData.success) { @@ -114,33 +184,214 @@ class FirecrawlApp { waitUntilDone: boolean = true, pollInterval: number = 2, idempotencyKey?: string - ): Promise { + ): Promise { const headers = this.prepareHeaders(idempotencyKey) - let jsonData: Params = { url, ...params } + + // Create a clean payload with only valid parameters + const validParams: any = { + url + } + + // Add scrape options with only non-empty values + const scrapeOptions: any = { + formats: ['markdown'], + onlyMainContent: true + } + + // Add crawl-specific parameters if they exist and are not empty + if (params) { + const validCrawlParams = [ + 'excludePaths', + 'includePaths', + 'maxDepth', + 'maxDiscoveryDepth', + 'ignoreSitemap', + 'ignoreQueryParameters', + 'limit', + 'allowBackwardLinks', + 'allowExternalLinks', + 'delay' + ] + + validCrawlParams.forEach((param) => { + if (params[param] !== undefined && params[param] !== null && params[param] !== '') { + validParams[param] = params[param] + } + }) + } + + // Add scrape options if they exist and are not empty + if (params?.scrapeOptions) { + if (params.scrapeOptions.includePaths) { + const includePaths = Array.isArray(params.scrapeOptions.includePaths) + ? params.scrapeOptions.includePaths + : params.scrapeOptions.includePaths.split(',') + if (includePaths.length > 0) { + validParams.includePaths = includePaths + } + } + + if (params.scrapeOptions.excludePaths) { + const excludePaths = Array.isArray(params.scrapeOptions.excludePaths) + ? 
params.scrapeOptions.excludePaths + : params.scrapeOptions.excludePaths.split(',') + if (excludePaths.length > 0) { + validParams.excludePaths = excludePaths + } + } + + if (params.scrapeOptions.limit) { + validParams.limit = params.scrapeOptions.limit + } + + const validScrapeParams = ['mobile', 'skipTlsVerification', 'timeout', 'includeTags', 'excludeTags', 'onlyMainContent'] + + validScrapeParams.forEach((param) => { + if (params.scrapeOptions[param] !== undefined && params.scrapeOptions[param] !== null) { + scrapeOptions[param] = params.scrapeOptions[param] + } + }) + } + + // Only add scrapeOptions if it has more than just the default values + if (Object.keys(scrapeOptions).length > 2) { + validParams.scrapeOptions = scrapeOptions + } + try { - const response: AxiosResponse = await this.postRequest(this.apiUrl + '/v0/crawl', jsonData, headers) + const response: AxiosResponse = await this.postRequest(this.apiUrl + '/v1/crawl', validParams, headers) if (response.status === 200) { - const jobId: string = response.data.jobId + const crawlResponse = response.data as CrawlResponse + if (!crawlResponse.success) { + throw new Error(`Crawl request failed: ${crawlResponse.error || 'Unknown error'}`) + } + if (waitUntilDone) { - return this.monitorJobStatus(jobId, headers, pollInterval) + return this.monitorJobStatus(crawlResponse.id, headers, pollInterval) } else { - return { success: true, jobId } + return crawlResponse } } else { this.handleError(response, 'start crawl job') } + } catch (error: any) { + if (error.response?.data?.error) { + throw new Error(`Crawl failed: ${error.response.data.error}`) + } + throw new Error(`Crawl failed: ${error.message}`) + } + + return { success: false, id: '', url: '' } + } + + async extract( + request: ExtractRequest, + waitUntilDone: boolean = true, + pollInterval: number = 2 + ): Promise { + const headers = this.prepareHeaders() + + // Create a clean payload with only valid parameters + const validParams: any = { + urls: request.urls + } + + // Add optional parameters if they exist and are not empty + if (request.prompt) { + validParams.prompt = request.prompt + } + + if (request.schema) { + validParams.schema = request.schema + } + + const validExtractParams = ['enableWebSearch', 'ignoreSitemap', 'includeSubdomains', 'showSources'] as const + + validExtractParams.forEach((param) => { + if (request[param] !== undefined && request[param] !== null) { + validParams[param] = request[param] + } + }) + + // Add scrape options if they exist + if (request.scrapeOptions) { + const scrapeOptions: any = { + formats: ['markdown'], + onlyMainContent: true + } + + // Handle includeTags + if (request.scrapeOptions.includeTags) { + const includeTags = Array.isArray(request.scrapeOptions.includeTags) + ? request.scrapeOptions.includeTags + : request.scrapeOptions.includeTags.split(',') + if (includeTags.length > 0) { + scrapeOptions.includeTags = includeTags + } + } + + // Handle excludeTags + if (request.scrapeOptions.excludeTags) { + const excludeTags = Array.isArray(request.scrapeOptions.excludeTags) + ? 
request.scrapeOptions.excludeTags + : request.scrapeOptions.excludeTags.split(',') + if (excludeTags.length > 0) { + scrapeOptions.excludeTags = excludeTags + } + } + + // Add other scrape options if they exist and are not empty + const validScrapeParams = ['mobile', 'skipTlsVerification', 'timeout'] as const + + validScrapeParams.forEach((param) => { + if (request.scrapeOptions?.[param] !== undefined && request.scrapeOptions?.[param] !== null) { + scrapeOptions[param] = request.scrapeOptions[param] + } + }) + + // Add JSON options if they exist + if (request.scrapeOptions.jsonOptions) { + scrapeOptions.jsonOptions = {} + if (request.scrapeOptions.jsonOptions.schema) { + scrapeOptions.jsonOptions.schema = request.scrapeOptions.jsonOptions.schema + } + if (request.scrapeOptions.jsonOptions.prompt) { + scrapeOptions.jsonOptions.prompt = request.scrapeOptions.jsonOptions.prompt + } + } + + // Only add scrapeOptions if it has more than just the default values + if (Object.keys(scrapeOptions).length > 2) { + validParams.scrapeOptions = scrapeOptions + } + } + + try { + const response: AxiosResponse = await this.postRequest(this.apiUrl + '/v1/extract', validParams, headers) + if (response.status === 200) { + const extractResponse = response.data as ExtractResponse + if (waitUntilDone) { + return this.monitorExtractStatus(extractResponse.id, headers, pollInterval) + } else { + return extractResponse + } + } else { + this.handleError(response, 'start extract job') + } } catch (error: any) { throw new Error(error.message) } - return { success: false, error: 'Internal server error.' } + return { success: false, id: '', url: '' } } private prepareHeaders(idempotencyKey?: string): AxiosRequestHeaders { return { 'Content-Type': 'application/json', Authorization: `Bearer ${this.apiKey}`, + 'X-Origin': 'flowise', + 'X-Origin-Type': 'integration', ...(idempotencyKey ? { 'x-idempotency-key': idempotencyKey } : {}) - } as AxiosRequestHeaders & { 'x-idempotency-key'?: string } + } as AxiosRequestHeaders & { 'X-Origin': string; 'X-Origin-Type': string; 'x-idempotency-key'?: string } } private postRequest(url: string, data: Params, headers: AxiosRequestHeaders): Promise { @@ -151,33 +402,58 @@ class FirecrawlApp { return axios.get(url, { headers }) } - private async monitorJobStatus(jobId: string, headers: AxiosRequestHeaders, checkInterval: number): Promise { + private async monitorJobStatus(jobId: string, headers: AxiosRequestHeaders, checkInterval: number): Promise { let isJobCompleted = false while (!isJobCompleted) { - const statusResponse: AxiosResponse = await this.getRequest(this.apiUrl + `/v0/crawl/status/${jobId}`, headers) + const statusResponse: AxiosResponse = await this.getRequest(this.apiUrl + `/v1/crawl/${jobId}`, headers) if (statusResponse.status === 200) { - const statusData = statusResponse.data + const statusData = statusResponse.data as CrawlStatusResponse switch (statusData.status) { case 'completed': isJobCompleted = true - if ('data' in statusData) { - return statusData.data - } else { - throw new Error('Crawl job completed but no data was returned') + return statusData + case 'scraping': + case 'failed': + if (statusData.status === 'failed') { + throw new Error('Crawl job failed') } - case 'active': - case 'paused': - case 'pending': - case 'queued': await new Promise((resolve) => setTimeout(resolve, Math.max(checkInterval, 2) * 1000)) break default: - throw new Error(`Crawl job failed or was stopped. 
Status: ${statusData.status}`) + throw new Error(`Unknown crawl status: ${statusData.status}`) } } else { this.handleError(statusResponse, 'check crawl status') } } + throw new Error('Failed to monitor job status') + } + + private async monitorExtractStatus(jobId: string, headers: AxiosRequestHeaders, checkInterval: number): Promise { + let isJobCompleted = false + while (!isJobCompleted) { + const statusResponse: AxiosResponse = await this.getRequest(this.apiUrl + `/v1/extract/${jobId}`, headers) + if (statusResponse.status === 200) { + const statusData = statusResponse.data as ExtractStatusResponse + switch (statusData.status) { + case 'completed': + isJobCompleted = true + return statusData + case 'processing': + case 'failed': + if (statusData.status === 'failed') { + throw new Error('Extract job failed') + } + await new Promise((resolve) => setTimeout(resolve, Math.max(checkInterval, 2) * 1000)) + break + default: + throw new Error(`Unknown extract status: ${statusData.status}`) + } + } else { + this.handleError(statusResponse, 'check extract status') + } + } + throw new Error('Failed to monitor extract status') } private handleError(response: AxiosResponse, action: string): void { @@ -195,15 +471,15 @@ interface FirecrawlLoaderParameters { url: string apiKey?: string apiUrl?: string - mode?: 'crawl' | 'scrape' + mode?: 'crawl' | 'scrape' | 'extract' params?: Record } -class FireCrawlLoader extends BaseDocumentLoader { +export class FireCrawlLoader extends BaseDocumentLoader { private apiKey: string private apiUrl: string private url: string - private mode: 'crawl' | 'scrape' + private mode: 'crawl' | 'scrape' | 'extract' private params?: Record constructor(loaderParams: FirecrawlLoaderParameters) { @@ -231,19 +507,79 @@ class FireCrawlLoader extends BaseDocumentLoader { } firecrawlDocs = [response.data as FirecrawlDocument] } else if (this.mode === 'crawl') { - const response = await app.crawlUrl(this.url, this.params, true) - firecrawlDocs = response as FirecrawlDocument[] + const response = await app.crawlUrl(this.url, this.params) + if ('status' in response) { + if (response.status === 'failed') { + throw new Error('Firecrawl: Crawl job failed') + } + firecrawlDocs = response.data || [] + } else { + if (!response.success) { + throw new Error(`Firecrawl: Failed to scrape URL. Error: ${response.error}`) + } + firecrawlDocs = [response.data as FirecrawlDocument] + } + } else if (this.mode === 'extract') { + this.params!.urls = [this.url] + const response = await app.extract(this.params as any as ExtractRequest) + if (!response.success) { + throw new Error(`Firecrawl: Failed to extract URL.`) + } + + // Convert extract response to document format + if ('data' in response && response.data) { + // Create a document from the extracted data + const extractedData = response.data + const content = JSON.stringify(extractedData, null, 2) + + const metadata: Record = { + source: this.url, + type: 'extracted_data' + } + + // Add status and expiresAt if they exist in the response + if ('status' in response) { + metadata.status = response.status + } + if ('data' in response) { + metadata.data = response.data + } + if ('expiresAt' in response) { + metadata.expiresAt = response.expiresAt + } + + return [ + new Document({ + pageContent: content, + metadata + }) + ] + } + return [] } else { - throw new Error(`Unrecognized mode '${this.mode}'. Expected one of 'crawl', 'scrape'.`) + throw new Error(`Unrecognized mode '${this.mode}'. 
Expected one of 'crawl', 'scrape', 'extract'.`) } - return firecrawlDocs.map( - (doc) => - new Document({ - pageContent: doc.markdown || '', - metadata: doc.metadata || {} - }) - ) + // Convert Firecrawl documents to LangChain documents + const documents = firecrawlDocs.map((doc) => { + // Use markdown content if available, otherwise fallback to HTML or empty string + const content = doc.markdown || doc.html || doc.rawHtml || '' + + // Create a standard LangChain document + return new Document({ + pageContent: content, + metadata: { + ...doc.metadata, + source: doc.metadata?.sourceURL || this.url, + title: doc.metadata?.title, + description: doc.metadata?.description, + language: doc.metadata?.language, + statusCode: doc.metadata?.statusCode + } + }) + }) + + return documents } } @@ -266,7 +602,7 @@ class FireCrawl_DocumentLoaders implements INode { this.name = 'fireCrawl' this.type = 'Document' this.icon = 'firecrawl.png' - this.version = 2.1 + this.version = 3.0 this.category = 'Document Loaders' this.description = 'Load data from URL using FireCrawl' this.baseClasses = [this.type] @@ -287,7 +623,7 @@ class FireCrawl_DocumentLoaders implements INode { label: 'URLs', name: 'url', type: 'string', - description: 'URL to be crawled/scraped', + description: 'URL to be crawled/scraped/extracted', placeholder: 'https://docs.flowiseai.com' }, { @@ -304,47 +640,95 @@ class FireCrawl_DocumentLoaders implements INode { label: 'Scrape', name: 'scrape', description: 'Scrape a URL and get its content' + }, + { + label: 'Extract', + name: 'extract', + description: 'Extract data from a URL' } ], default: 'crawl' }, { - // maxCrawlPages - label: 'Max Crawl Pages', - name: 'maxCrawlPages', + // includeTags + label: '[Scrape] Include Tags', + name: 'includeTags', type: 'string', - description: 'Maximum number of pages to crawl', + description: 'Tags to include in the output. Use comma to separate multiple tags.', optional: true, additionalParams: true }, { - // generateImgAltText - label: 'Generate Image Alt Text', - name: 'generateImgAltText', - type: 'boolean', - description: 'Generate alt text for images', + // excludeTags + label: '[Scrape] Exclude Tags', + name: 'excludeTags', + type: 'string', + description: 'Tags to exclude from the output. Use comma to separate multiple tags.', optional: true, additionalParams: true }, { - // returnOnlyUrls - label: 'Return Only URLs', - name: 'returnOnlyUrls', + // onlyMainContent + label: '[Scrape] Only Main Content', + name: 'onlyMainContent', type: 'boolean', - description: 'Return only URLs of the crawled pages', + description: 'Extract only the main content of the page', optional: true, additionalParams: true }, { - // onlyMainContent - label: 'Only Main Content', - name: 'onlyMainContent', - type: 'boolean', - description: 'Extract only the main content of the page', + // limit + label: '[Crawl] Limit', + name: 'limit', + type: 'string', + description: 'Maximum number of pages to crawl', + optional: true, + additionalParams: true, + default: '10000' + }, + { + label: '[Crawl] Include Paths', + name: 'includePaths', + type: 'string', + description: + 'URL pathname regex patterns that include matching URLs in the crawl. 
Only the paths that match the specified patterns will be included in the response.', + placeholder: `blog/.*, news/.*`, + optional: true, + additionalParams: true + }, + { + label: '[Crawl] Exclude Paths', + name: 'excludePaths', + type: 'string', + description: 'URL pathname regex patterns that exclude matching URLs from the crawl.', + placeholder: `blog/.*, news/.*`, + optional: true, + additionalParams: true + }, + { + label: '[Extract] Schema', + name: 'extractSchema', + type: 'json', + description: 'JSON schema for data extraction', + optional: true, + additionalParams: true + }, + { + label: '[Extract] Prompt', + name: 'extractPrompt', + type: 'string', + description: 'Prompt for data extraction', + optional: true, + additionalParams: true + }, + { + label: '[Extract] Job ID', + name: 'extractJobId', + type: 'string', + description: 'ID of the extract job', optional: true, additionalParams: true } - // ... (other input parameters) ] this.outputs = [ { @@ -367,66 +751,72 @@ class FireCrawl_DocumentLoaders implements INode { const metadata = nodeData.inputs?.metadata const url = nodeData.inputs?.url as string const crawlerType = nodeData.inputs?.crawlerType as string - const maxCrawlPages = nodeData.inputs?.maxCrawlPages as string - const generateImgAltText = nodeData.inputs?.generateImgAltText as boolean - const returnOnlyUrls = nodeData.inputs?.returnOnlyUrls as boolean + const limit = nodeData.inputs?.limit as string const onlyMainContent = nodeData.inputs?.onlyMainContent as boolean const credentialData = await getCredentialData(nodeData.credential ?? '', options) const firecrawlApiToken = getCredentialParam('firecrawlApiToken', credentialData, nodeData) const firecrawlApiUrl = getCredentialParam('firecrawlApiUrl', credentialData, nodeData, 'https://api.firecrawl.dev') const output = nodeData.outputs?.output as string - const urlPatternsExcludes = nodeData.inputs?.urlPatternsExcludes - ? (nodeData.inputs.urlPatternsExcludes.split(',') as string[]) - : undefined - const urlPatternsIncludes = nodeData.inputs?.urlPatternsIncludes - ? (nodeData.inputs.urlPatternsIncludes.split(',') as string[]) - : undefined + const includePaths = nodeData.inputs?.includePaths ? (nodeData.inputs.includePaths.split(',') as string[]) : undefined + const excludePaths = nodeData.inputs?.excludePaths ? (nodeData.inputs.excludePaths.split(',') as string[]) : undefined + + const includeTags = nodeData.inputs?.includeTags ? (nodeData.inputs.includeTags.split(',') as string[]) : undefined + const excludeTags = nodeData.inputs?.excludeTags ? (nodeData.inputs.excludeTags.split(',') as string[]) : undefined + + const extractSchema = nodeData.inputs?.extractSchema + const extractPrompt = nodeData.inputs?.extractPrompt as string const input: FirecrawlLoaderParameters = { url, - mode: crawlerType as 'crawl' | 'scrape', + mode: crawlerType as 'crawl' | 'scrape' | 'extract', apiKey: firecrawlApiToken, apiUrl: firecrawlApiUrl, params: { - crawlerOptions: { - includes: urlPatternsIncludes, - excludes: urlPatternsExcludes, - generateImgAltText, - returnOnlyUrls, - limit: maxCrawlPages ? parseFloat(maxCrawlPages) : undefined + scrapeOptions: { + includePaths, + excludePaths, + limit: limit ? 
parseInt(limit, 10) : 1000, + includeTags, + excludeTags }, - pageOptions: { - onlyMainContent - } + schema: extractSchema || undefined, + prompt: extractPrompt || undefined + } + } + + if (onlyMainContent === true) { + const scrapeOptions = input.params?.scrapeOptions as any + input.params!.scrapeOptions = { + ...scrapeOptions, + onlyMainContent: true } } - const loader = new FireCrawlLoader(input) + const loader = new FireCrawlLoader(input) let docs = [] - if (textSplitter) { - docs = await loader.loadAndSplit(textSplitter) - } else { - docs = await loader.load() + // Load documents + docs = await loader.load() + + // Apply text splitting if configured + if (textSplitter && docs.length > 0) { + docs = await textSplitter.splitDocuments(docs) } + // Apply metadata if provided if (metadata) { const parsedMetadata = typeof metadata === 'object' ? metadata : JSON.parse(metadata) - let finaldocs = [] - for (const doc of docs) { - const newdoc = { - ...doc, - metadata: { - ...doc.metadata, - ...parsedMetadata - } + docs = docs.map((doc) => ({ + ...doc, + metadata: { + ...doc.metadata, + ...parsedMetadata } - finaldocs.push(newdoc) - } - return finaldocs + })) } + // Return based on output type if (output === 'document') { return docs } else { @@ -440,3 +830,6 @@ class FireCrawl_DocumentLoaders implements INode { } module.exports = { nodeClass: FireCrawl_DocumentLoaders } + +// FOR TESTING PURPOSES +// export { FireCrawl_DocumentLoaders } diff --git a/packages/components/nodes/documentloaders/Json/Json.ts b/packages/components/nodes/documentloaders/Json/Json.ts index ee07f1369be..f94138a4c0c 100644 --- a/packages/components/nodes/documentloaders/Json/Json.ts +++ b/packages/components/nodes/documentloaders/Json/Json.ts @@ -146,11 +146,12 @@ class Json_DocumentLoaders implements INode { } else { files = [fileName] } + const orgId = options.orgId const chatflowid = options.chatflowid for (const file of files) { if (!file) continue - const fileData = await getFileFromStorage(file, chatflowid) + const fileData = await getFileFromStorage(file, orgId, chatflowid) const blob = new Blob([fileData]) const loader = new JSONLoader(blob, pointers.length != 0 ? 
pointers : undefined, metadata) diff --git a/packages/components/nodes/documentloaders/Jsonlines/Jsonlines.ts b/packages/components/nodes/documentloaders/Jsonlines/Jsonlines.ts index fe345e00d52..2b035dcfc2a 100644 --- a/packages/components/nodes/documentloaders/Jsonlines/Jsonlines.ts +++ b/packages/components/nodes/documentloaders/Jsonlines/Jsonlines.ts @@ -135,11 +135,12 @@ class Jsonlines_DocumentLoaders implements INode { } else { files = [fileName] } + const orgId = options.orgId const chatflowid = options.chatflowid for (const file of files) { if (!file) continue - const fileData = await getFileFromStorage(file, chatflowid) + const fileData = await getFileFromStorage(file, orgId, chatflowid) const blob = new Blob([fileData]) const loader = new JSONLinesLoader(blob, pointer, metadata) diff --git a/packages/components/nodes/documentloaders/Pdf/Pdf.ts b/packages/components/nodes/documentloaders/Pdf/Pdf.ts index 7e4c777ffae..52f5197cfca 100644 --- a/packages/components/nodes/documentloaders/Pdf/Pdf.ts +++ b/packages/components/nodes/documentloaders/Pdf/Pdf.ts @@ -122,11 +122,12 @@ class Pdf_DocumentLoaders implements INode { } else { files = [fileName] } + const orgId = options.orgId const chatflowid = options.chatflowid for (const file of files) { if (!file) continue - const fileData = await getFileFromStorage(file, chatflowid) + const fileData = await getFileFromStorage(file, orgId, chatflowid) const bf = Buffer.from(fileData) await this.extractDocs(usage, bf, legacyBuild, textSplitter, docs) } diff --git a/packages/components/nodes/documentloaders/Playwright/Playwright.ts b/packages/components/nodes/documentloaders/Playwright/Playwright.ts index f0946dbd834..ba44ee7fcde 100644 --- a/packages/components/nodes/documentloaders/Playwright/Playwright.ts +++ b/packages/components/nodes/documentloaders/Playwright/Playwright.ts @@ -159,6 +159,7 @@ class Playwright_DocumentLoaders implements INode { let waitForSelector = nodeData.inputs?.waitForSelector as string const _omitMetadataKeys = nodeData.inputs?.omitMetadataKeys as string const output = nodeData.outputs?.output as string + const orgId = options.orgId let omitMetadataKeys: string[] = [] if (_omitMetadataKeys) { @@ -202,13 +203,14 @@ class Playwright_DocumentLoaders implements INode { } return docs } catch (err) { - if (process.env.DEBUG === 'true') options.logger.error(`error in PlaywrightWebBaseLoader: ${err.message}, on page: ${url}`) + if (process.env.DEBUG === 'true') + options.logger.error(`[${orgId}]: Error in PlaywrightWebBaseLoader: ${err.message}, on page: ${url}`) } } let docs: IDocument[] = [] if (relativeLinksMethod) { - if (process.env.DEBUG === 'true') options.logger.info(`Start ${relativeLinksMethod}`) + if (process.env.DEBUG === 'true') options.logger.info(`[${orgId}]: Start PlaywrightWebBaseLoader ${relativeLinksMethod}`) // if limit is 0 we don't want it to default to 10 so we check explicitly for null or undefined // so when limit is 0 we can fetch all the links if (limit === null || limit === undefined) limit = 10 @@ -219,15 +221,18 @@ class Playwright_DocumentLoaders implements INode { : relativeLinksMethod === 'webCrawl' ? 
await webCrawl(url, limit) : await xmlScrape(url, limit) - if (process.env.DEBUG === 'true') options.logger.info(`pages: ${JSON.stringify(pages)}, length: ${pages.length}`) + if (process.env.DEBUG === 'true') + options.logger.info(`[${orgId}]: PlaywrightWebBaseLoader pages: ${JSON.stringify(pages)}, length: ${pages.length}`) if (!pages || pages.length === 0) throw new Error('No relative links found') for (const page of pages) { docs.push(...(await playwrightLoader(page))) } - if (process.env.DEBUG === 'true') options.logger.info(`Finish ${relativeLinksMethod}`) + if (process.env.DEBUG === 'true') options.logger.info(`[${orgId}]: Finish PlaywrightWebBaseLoader ${relativeLinksMethod}`) } else if (selectedLinks && selectedLinks.length > 0) { if (process.env.DEBUG === 'true') - options.logger.info(`pages: ${JSON.stringify(selectedLinks)}, length: ${selectedLinks.length}`) + options.logger.info( + `[${orgId}]: PlaywrightWebBaseLoader pages: ${JSON.stringify(selectedLinks)}, length: ${selectedLinks.length}` + ) for (const page of selectedLinks.slice(0, limit)) { docs.push(...(await playwrightLoader(page))) } diff --git a/packages/components/nodes/documentloaders/Puppeteer/Puppeteer.ts b/packages/components/nodes/documentloaders/Puppeteer/Puppeteer.ts index fa88f96328d..607a99357e8 100644 --- a/packages/components/nodes/documentloaders/Puppeteer/Puppeteer.ts +++ b/packages/components/nodes/documentloaders/Puppeteer/Puppeteer.ts @@ -155,6 +155,7 @@ class Puppeteer_DocumentLoaders implements INode { let waitForSelector = nodeData.inputs?.waitForSelector as string const _omitMetadataKeys = nodeData.inputs?.omitMetadataKeys as string const output = nodeData.outputs?.output as string + const orgId = options.orgId let omitMetadataKeys: string[] = [] if (_omitMetadataKeys) { @@ -198,13 +199,14 @@ class Puppeteer_DocumentLoaders implements INode { } return docs } catch (err) { - if (process.env.DEBUG === 'true') options.logger.error(`error in PuppeteerWebBaseLoader: ${err.message}, on page: ${url}`) + if (process.env.DEBUG === 'true') + options.logger.error(`[${orgId}]: Error in PuppeteerWebBaseLoader: ${err.message}, on page: ${url}`) } } let docs: IDocument[] = [] if (relativeLinksMethod) { - if (process.env.DEBUG === 'true') options.logger.info(`Start ${relativeLinksMethod}`) + if (process.env.DEBUG === 'true') options.logger.info(`[${orgId}]: Start PuppeteerWebBaseLoader ${relativeLinksMethod}`) // if limit is 0 we don't want it to default to 10 so we check explicitly for null or undefined // so when limit is 0 we can fetch all the links if (limit === null || limit === undefined) limit = 10 @@ -215,15 +217,18 @@ class Puppeteer_DocumentLoaders implements INode { : relativeLinksMethod === 'webCrawl' ? 
await webCrawl(url, limit) : await xmlScrape(url, limit) - if (process.env.DEBUG === 'true') options.logger.info(`pages: ${JSON.stringify(pages)}, length: ${pages.length}`) + if (process.env.DEBUG === 'true') + options.logger.info(`[${orgId}]: PuppeteerWebBaseLoader pages: ${JSON.stringify(pages)}, length: ${pages.length}`) if (!pages || pages.length === 0) throw new Error('No relative links found') for (const page of pages) { docs.push(...(await puppeteerLoader(page))) } - if (process.env.DEBUG === 'true') options.logger.info(`Finish ${relativeLinksMethod}`) + if (process.env.DEBUG === 'true') options.logger.info(`[${orgId}]: Finish PuppeteerWebBaseLoader ${relativeLinksMethod}`) } else if (selectedLinks && selectedLinks.length > 0) { if (process.env.DEBUG === 'true') - options.logger.info(`pages: ${JSON.stringify(selectedLinks)}, length: ${selectedLinks.length}`) + options.logger.info( + `[${orgId}]: PuppeteerWebBaseLoader pages: ${JSON.stringify(selectedLinks)}, length: ${selectedLinks.length}` + ) for (const page of selectedLinks.slice(0, limit)) { docs.push(...(await puppeteerLoader(page))) } diff --git a/packages/components/nodes/documentloaders/S3File/README.md b/packages/components/nodes/documentloaders/S3File/README.md index 142ed86e1b9..ad069f5db80 100644 --- a/packages/components/nodes/documentloaders/S3File/README.md +++ b/packages/components/nodes/documentloaders/S3File/README.md @@ -4,10 +4,10 @@ DS File Loarder integration for Flowise ## 🌱 Env Variables -| Variable | Description | Type | Default | -| ---------------------------- | ----------------------------------------------------------------------------------------------- | ------------------------------------------------ | ----------------------------------- | -| UNSTRUCTURED_API_URL | Default `unstructuredApiUrl` for S3 File Loader | String | http://localhost:8000/general/v0/general | +| Variable | Description | Type | Default | +| -------------------- | ----------------------------------------------- | ------ | ---------------------------------------- | +| UNSTRUCTURED_API_URL | Default `unstructuredApiUrl` for S3 File Loader | String | http://localhost:8000/general/v0/general | ## License -Source code in this repository is made available under the [Apache License Version 2.0](https://github.com/FlowiseAI/Flowise/blob/master/LICENSE.md). \ No newline at end of file +Source code in this repository is made available under the [Apache License Version 2.0](https://github.com/FlowiseAI/Flowise/blob/master/LICENSE.md). 
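The CheerioWebBaseLoader, PlaywrightWebBaseLoader, and PuppeteerWebBaseLoader hunks above all rewrite their DEBUG logging the same way: every message is prefixed with the organisation id and the loader name. A minimal sketch of that convention, assuming only the options.orgId and options.logger fields those hunks already use; the debugLog helper itself is hypothetical and not part of the patch:

// Hypothetical helper, not part of the patch: captures the logging convention above.
interface LoaderLogOptions {
    orgId: string
    logger: { info: (msg: string) => void; error: (msg: string) => void }
}

const debugLog = (options: LoaderLogOptions, loaderName: string, message: string): void => {
    // Matches the guards in the hunks above: only log when DEBUG is enabled,
    // and prefix every line with the organisation id plus the loader name.
    if (process.env.DEBUG === 'true') {
        options.logger.info(`[${options.orgId}]: ${loaderName} ${message}`)
    }
}

// e.g. debugLog(options, 'PuppeteerWebBaseLoader', `pages: ${JSON.stringify(pages)}, length: ${pages.length}`)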
diff --git a/packages/components/nodes/documentloaders/Text/Text.ts b/packages/components/nodes/documentloaders/Text/Text.ts index 950107d6a7a..448a25979dd 100644 --- a/packages/components/nodes/documentloaders/Text/Text.ts +++ b/packages/components/nodes/documentloaders/Text/Text.ts @@ -98,11 +98,12 @@ class Text_DocumentLoaders implements INode { } else { files = [fileName] } + const orgId = options.orgId const chatflowid = options.chatflowid for (const file of files) { if (!file) continue - const fileData = await getFileFromStorage(file, chatflowid) + const fileData = await getFileFromStorage(file, orgId, chatflowid) const blob = new Blob([fileData]) const loader = new TextLoader(blob) diff --git a/packages/components/nodes/documentloaders/Unstructured/README.md b/packages/components/nodes/documentloaders/Unstructured/README.md index 0854cc67d7b..5295e9f1f38 100644 --- a/packages/components/nodes/documentloaders/Unstructured/README.md +++ b/packages/components/nodes/documentloaders/Unstructured/README.md @@ -4,10 +4,10 @@ Unstructured File Loader integration for Flowise ## 🌱 Env Variables -| Variable | Description | Type | Default | -| ---------------------------- | ----------------------------------------------------------------------------------------------- | ------------------------------------------------ | ----------------------------------- | -| UNSTRUCTURED_API_URL | Default `apiUrl` for Unstructured File/Floder Loader | String | http://localhost:8000/general/v0/general | +| Variable | Description | Type | Default | +| -------------------- | ---------------------------------------------------- | ------ | ---------------------------------------- | +| UNSTRUCTURED_API_URL | Default `apiUrl` for Unstructured File/Floder Loader | String | http://localhost:8000/general/v0/general | ## License -Source code in this repository is made available under the [Apache License Version 2.0](https://github.com/FlowiseAI/Flowise/blob/master/LICENSE.md). \ No newline at end of file +Source code in this repository is made available under the [Apache License Version 2.0](https://github.com/FlowiseAI/Flowise/blob/master/LICENSE.md). 
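Every file-based loader touched in this diff (Csv, Docx, Epub, File, Json, Jsonlines, Pdf, Text, and the Unstructured loader below) now passes options.orgId as the second argument to getFileFromStorage, so stored files are resolved per organisation before the chatflow id. A hedged sketch of the shared calling pattern; the import paths and the loadFileBlobs helper are assumptions for illustration, and only the getFileFromStorage(file, orgId, chatflowid) signature comes from the hunks:

// Illustrative only: the per-loader loops in the diff all follow this shape.
import { ICommonObject } from '../../../src/Interface'
import { getFileFromStorage } from '../../../src/storageUtils'

async function loadFileBlobs(files: string[], options: ICommonObject): Promise<Blob[]> {
    const orgId = options.orgId
    const chatflowid = options.chatflowid
    const blobs: Blob[] = []
    for (const file of files) {
        if (!file) continue
        // Storage lookups are now keyed by organisation first, then chatflow
        const fileData = await getFileFromStorage(file, orgId, chatflowid)
        blobs.push(new Blob([fileData]))
    }
    return blobs
}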
diff --git a/packages/components/nodes/documentloaders/Unstructured/UnstructuredFile.ts b/packages/components/nodes/documentloaders/Unstructured/UnstructuredFile.ts index 82aa03accec..0e9b51cc214 100644 --- a/packages/components/nodes/documentloaders/Unstructured/UnstructuredFile.ts +++ b/packages/components/nodes/documentloaders/Unstructured/UnstructuredFile.ts @@ -532,11 +532,12 @@ class UnstructuredFile_DocumentLoaders implements INode { } else { files = [fileName] } + const orgId = options.orgId const chatflowid = options.chatflowid for (const file of files) { if (!file) continue - const fileData = await getFileFromStorage(file, chatflowid) + const fileData = await getFileFromStorage(file, orgId, chatflowid) const loaderDocs = await loader.loadAndSplitBuffer(fileData, file) docs.push(...loaderDocs) } diff --git a/packages/components/nodes/embeddings/AzureOpenAIEmbedding/README.md b/packages/components/nodes/embeddings/AzureOpenAIEmbedding/README.md index c3bd59e54e8..bd8df9f5392 100644 --- a/packages/components/nodes/embeddings/AzureOpenAIEmbedding/README.md +++ b/packages/components/nodes/embeddings/AzureOpenAIEmbedding/README.md @@ -4,13 +4,13 @@ Azure OpenAI Embedding Model integration for Flowise ## 🌱 Env Variables -| Variable | Description | Type | Default | -| ---------------------------- | ----------------------------------------------------------------------------------------------- | ------------------------------------------------ | ----------------------------------- | -| AZURE_OPENAI_API_KEY | Default `credential.azureOpenAIApiKey` for Azure OpenAI Model | String | | -| AZURE_OPENAI_API_INSTANCE_NAME | Default `credential.azureOpenAIApiInstanceName` for Azure OpenAI Model | String | | -| AZURE_OPENAI_API_EMBEDDINGS_DEPLOYMENT_NAME | Default `credential.azureOpenAIApiDeploymentName` for Azure OpenAI Model | String | | -| AZURE_OPENAI_API_VERSION | Default `credential.azureOpenAIApiVersion` for Azure OpenAI Model | String | | +| Variable | Description | Type | Default | +| ------------------------------------------- | ------------------------------------------------------------------------ | ------ | ------- | +| AZURE_OPENAI_API_KEY | Default `credential.azureOpenAIApiKey` for Azure OpenAI Model | String | | +| AZURE_OPENAI_API_INSTANCE_NAME | Default `credential.azureOpenAIApiInstanceName` for Azure OpenAI Model | String | | +| AZURE_OPENAI_API_EMBEDDINGS_DEPLOYMENT_NAME | Default `credential.azureOpenAIApiDeploymentName` for Azure OpenAI Model | String | | +| AZURE_OPENAI_API_VERSION | Default `credential.azureOpenAIApiVersion` for Azure OpenAI Model | String | | ## License -Source code in this repository is made available under the [Apache License Version 2.0](https://github.com/FlowiseAI/Flowise/blob/master/LICENSE.md). \ No newline at end of file +Source code in this repository is made available under the [Apache License Version 2.0](https://github.com/FlowiseAI/Flowise/blob/master/LICENSE.md). 
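The LlamaIndex engine diffs that follow (ContextChatEngine, SimpleChatEngine, QueryEngine, SubQuestionQueryEngine) each add one call that injects evaluation metadata into the engine before it is used, via the newly imported EvaluationRunTracerLlama. A simplified sketch of that injection point, assuming the static injectEvaluationMetadata(nodeData, options, engine) signature shown in those hunks; the surrounding setup code is a stand-in, not the real node implementation:

// Simplified stand-in for the engine node init methods changed below.
import { ContextChatEngine } from 'llamaindex'
import { EvaluationRunTracerLlama } from '../../../evaluation/EvaluationRunTracerLlama'
import { ICommonObject, INodeData } from '../../../src/Interface'

async function buildChatEngine(nodeData: INodeData, options: ICommonObject) {
    const model = nodeData.inputs?.model
    const vectorStoreRetriever = nodeData.inputs?.vectorStoreRetriever
    const chatEngine = new ContextChatEngine({ chatModel: model, retriever: vectorStoreRetriever })

    // These are needed for evaluation runs: tag the engine so the evaluation
    // tracer can correlate its calls with the current evaluation run
    await EvaluationRunTracerLlama.injectEvaluationMetadata(nodeData, options, chatEngine)

    return chatEngine
}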
diff --git a/packages/components/nodes/engine/ChatEngine/ContextChatEngine.ts b/packages/components/nodes/engine/ChatEngine/ContextChatEngine.ts index 35b6ae069b4..db85d061a56 100644 --- a/packages/components/nodes/engine/ChatEngine/ContextChatEngine.ts +++ b/packages/components/nodes/engine/ChatEngine/ContextChatEngine.ts @@ -10,6 +10,7 @@ import { } from '../../../src/Interface' import { Metadata, BaseRetriever, LLM, ContextChatEngine, ChatMessage, NodeWithScore } from 'llamaindex' import { reformatSourceDocuments } from '../EngineUtils' +import { EvaluationRunTracerLlama } from '../../../evaluation/EvaluationRunTracerLlama' class ContextChatEngine_LlamaIndex implements INode { label: string @@ -93,6 +94,9 @@ class ContextChatEngine_LlamaIndex implements INode { const chatEngine = new ContextChatEngine({ chatModel: model, retriever: vectorStoreRetriever }) + // these are needed for evaluation runs + await EvaluationRunTracerLlama.injectEvaluationMetadata(nodeData, options, chatEngine) + const msgs = (await memory.getChatMessages(this.sessionId, false, prependMessages)) as IMessage[] for (const message of msgs) { if (message.type === 'apiMessage') { diff --git a/packages/components/nodes/engine/ChatEngine/SimpleChatEngine.ts b/packages/components/nodes/engine/ChatEngine/SimpleChatEngine.ts index e6045fda6c6..20f48edc197 100644 --- a/packages/components/nodes/engine/ChatEngine/SimpleChatEngine.ts +++ b/packages/components/nodes/engine/ChatEngine/SimpleChatEngine.ts @@ -9,6 +9,7 @@ import { IServerSideEventStreamer } from '../../../src/Interface' import { LLM, ChatMessage, SimpleChatEngine } from 'llamaindex' +import { EvaluationRunTracerLlama } from '../../../evaluation/EvaluationRunTracerLlama' class SimpleChatEngine_LlamaIndex implements INode { label: string @@ -78,6 +79,9 @@ class SimpleChatEngine_LlamaIndex implements INode { const chatEngine = new SimpleChatEngine({ llm: model }) + // these are needed for evaluation runs + await EvaluationRunTracerLlama.injectEvaluationMetadata(nodeData, options, chatEngine) + const msgs = (await memory.getChatMessages(this.sessionId, false, prependMessages)) as IMessage[] for (const message of msgs) { if (message.type === 'apiMessage') { diff --git a/packages/components/nodes/engine/QueryEngine/QueryEngine.ts b/packages/components/nodes/engine/QueryEngine/QueryEngine.ts index 14eb3c5de1d..2a7e988669d 100644 --- a/packages/components/nodes/engine/QueryEngine/QueryEngine.ts +++ b/packages/components/nodes/engine/QueryEngine/QueryEngine.ts @@ -10,6 +10,7 @@ import { NodeWithScore } from 'llamaindex' import { reformatSourceDocuments } from '../EngineUtils' +import { EvaluationRunTracerLlama } from '../../../evaluation/EvaluationRunTracerLlama' class QueryEngine_LlamaIndex implements INode { label: string @@ -72,6 +73,8 @@ class QueryEngine_LlamaIndex implements INode { let sourceNodes: NodeWithScore[] = [] let isStreamingStarted = false + await EvaluationRunTracerLlama.injectEvaluationMetadata(nodeData, options, queryEngine) + const shouldStreamResponse = options.shouldStreamResponse const sseStreamer: IServerSideEventStreamer = options.sseStreamer as IServerSideEventStreamer const chatId = options.chatId diff --git a/packages/components/nodes/engine/SubQuestionQueryEngine/SubQuestionQueryEngine.ts b/packages/components/nodes/engine/SubQuestionQueryEngine/SubQuestionQueryEngine.ts index 6d8ceead9c4..b19eb2346ae 100644 --- a/packages/components/nodes/engine/SubQuestionQueryEngine/SubQuestionQueryEngine.ts +++ 
b/packages/components/nodes/engine/SubQuestionQueryEngine/SubQuestionQueryEngine.ts @@ -15,6 +15,7 @@ import { NodeWithScore } from 'llamaindex' import { reformatSourceDocuments } from '../EngineUtils' +import { EvaluationRunTracerLlama } from '../../../evaluation/EvaluationRunTracerLlama' class SubQuestionQueryEngine_LlamaIndex implements INode { label: string @@ -89,6 +90,8 @@ class SubQuestionQueryEngine_LlamaIndex implements INode { let sourceNodes: NodeWithScore[] = [] let isStreamingStarted = false + await EvaluationRunTracerLlama.injectEvaluationMetadata(nodeData, options, queryEngine) + const shouldStreamResponse = options.shouldStreamResponse const sseStreamer: IServerSideEventStreamer = options.sseStreamer as IServerSideEventStreamer const chatId = options.chatId diff --git a/packages/components/nodes/llms/Azure OpenAI/README.md b/packages/components/nodes/llms/Azure OpenAI/README.md index de47c4dd813..9d7dd4a5044 100644 --- a/packages/components/nodes/llms/Azure OpenAI/README.md +++ b/packages/components/nodes/llms/Azure OpenAI/README.md @@ -4,13 +4,13 @@ Azure OpenAI LLM integration for Flowise ## 🌱 Env Variables -| Variable | Description | Type | Default | -| ---------------------------- | ----------------------------------------------------------------------------------------------- | ------------------------------------------------ | ----------------------------------- | -| AZURE_OPENAI_API_KEY | Default `credential.azureOpenAIApiKey` for Azure OpenAI LLM | String | | -| AZURE_OPENAI_API_INSTANCE_NAME | Default `credential.azureOpenAIApiInstanceName` for Azure OpenAI LLM | String | | -| AZURE_OPENAI_API_DEPLOYMENT_NAME | Default `credential.azureOpenAIApiDeploymentName` for Azure OpenAI LLM | String | | -| AZURE_OPENAI_API_VERSION | Default `credential.azureOpenAIApiVersion` for Azure OpenAI LLM | String | | +| Variable | Description | Type | Default | +| -------------------------------- | ---------------------------------------------------------------------- | ------ | ------- | +| AZURE_OPENAI_API_KEY | Default `credential.azureOpenAIApiKey` for Azure OpenAI LLM | String | | +| AZURE_OPENAI_API_INSTANCE_NAME | Default `credential.azureOpenAIApiInstanceName` for Azure OpenAI LLM | String | | +| AZURE_OPENAI_API_DEPLOYMENT_NAME | Default `credential.azureOpenAIApiDeploymentName` for Azure OpenAI LLM | String | | +| AZURE_OPENAI_API_VERSION | Default `credential.azureOpenAIApiVersion` for Azure OpenAI LLM | String | | ## License -Source code in this repository is made available under the [Apache License Version 2.0](https://github.com/FlowiseAI/Flowise/blob/master/LICENSE.md). \ No newline at end of file +Source code in this repository is made available under the [Apache License Version 2.0](https://github.com/FlowiseAI/Flowise/blob/master/LICENSE.md). 
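The memory diffs that follow thread orgId from options into the saver and memory constructors and then forward it to mapChatMessageToBaseMessage, which now takes the organisation id as a second argument. A reduced sketch of that plumbing; the class below and the import path for mapChatMessageToBaseMessage are assumptions for illustration, and only the extra constructor field and the two-argument call come from the hunks:

import { DataSource } from 'typeorm'
import { IDatabaseEntity } from '../../../src/Interface'
import { mapChatMessageToBaseMessage } from '../../../src/utils'

// Fields the diff adds to the various *ExtendedInput interfaces, reduced to the
// parts relevant here; orgId is newly threaded through from options.orgId.
interface MemoryExtendedInput {
    appDataSource: DataSource
    databaseEntities: IDatabaseEntity
    chatflowid: string
    orgId: string
}

// Illustrative class only: stands in for BufferMemoryExtended and the saver classes below.
class OrgScopedMemory {
    constructor(private fields: MemoryExtendedInput) {}

    async toBaseMessages(chatMessages: any[]) {
        // orgId is forwarded so message artifacts can be resolved from org-scoped storage
        return mapChatMessageToBaseMessage(chatMessages, this.fields.orgId)
    }
}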
diff --git a/packages/components/nodes/memory/AgentMemory/AgentMemory.ts b/packages/components/nodes/memory/AgentMemory/AgentMemory.ts index d317e138f1d..ca6c0ebed1c 100644 --- a/packages/components/nodes/memory/AgentMemory/AgentMemory.ts +++ b/packages/components/nodes/memory/AgentMemory/AgentMemory.ts @@ -108,6 +108,7 @@ class AgentMemory_Memory implements INode { const databaseType = nodeData.inputs?.databaseType as string const databaseEntities = options.databaseEntities as IDatabaseEntity const chatflowid = options.chatflowid as string + const orgId = options.orgId as string const appDataSource = options.appDataSource as DataSource let additionalConfiguration = {} @@ -135,7 +136,8 @@ class AgentMemory_Memory implements INode { threadId, appDataSource, databaseEntities, - chatflowid + chatflowid, + orgId } const recordManager = new SqliteSaver(args) return recordManager @@ -159,7 +161,8 @@ class AgentMemory_Memory implements INode { threadId, appDataSource, databaseEntities, - chatflowid + chatflowid, + orgId } const recordManager = new PostgresSaver(args) return recordManager @@ -184,7 +187,8 @@ class AgentMemory_Memory implements INode { threadId, appDataSource, databaseEntities, - chatflowid + chatflowid, + orgId } const recordManager = new MySQLSaver(args) return recordManager diff --git a/packages/components/nodes/memory/AgentMemory/MySQLAgentMemory/MySQLAgentMemory.ts b/packages/components/nodes/memory/AgentMemory/MySQLAgentMemory/MySQLAgentMemory.ts index af3ebe6191b..3eb5c950350 100644 --- a/packages/components/nodes/memory/AgentMemory/MySQLAgentMemory/MySQLAgentMemory.ts +++ b/packages/components/nodes/memory/AgentMemory/MySQLAgentMemory/MySQLAgentMemory.ts @@ -65,6 +65,7 @@ class MySQLAgentMemory_Memory implements INode { const databaseEntities = options.databaseEntities as IDatabaseEntity const chatflowid = options.chatflowid as string const appDataSource = options.appDataSource as DataSource + const orgId = options.orgId as string let additionalConfiguration = {} if (additionalConfig) { @@ -102,7 +103,8 @@ class MySQLAgentMemory_Memory implements INode { threadId, appDataSource, databaseEntities, - chatflowid + chatflowid, + orgId } const recordManager = new MySQLSaver(args) return recordManager diff --git a/packages/components/nodes/memory/AgentMemory/MySQLAgentMemory/mysqlSaver.ts b/packages/components/nodes/memory/AgentMemory/MySQLAgentMemory/mysqlSaver.ts index 5b41b08874b..f55098ed790 100644 --- a/packages/components/nodes/memory/AgentMemory/MySQLAgentMemory/mysqlSaver.ts +++ b/packages/components/nodes/memory/AgentMemory/MySQLAgentMemory/mysqlSaver.ts @@ -242,7 +242,7 @@ export class MySQLSaver extends BaseCheckpointSaver implements MemoryMethods { } if (returnBaseMessages) { - return await mapChatMessageToBaseMessage(chatMessage) + return await mapChatMessageToBaseMessage(chatMessage, this.config.orgId) } let returnIMessages: IMessage[] = [] diff --git a/packages/components/nodes/memory/AgentMemory/PostgresAgentMemory/PostgresAgentMemory.ts b/packages/components/nodes/memory/AgentMemory/PostgresAgentMemory/PostgresAgentMemory.ts index cb6d041a6b2..2ab86f66e0e 100644 --- a/packages/components/nodes/memory/AgentMemory/PostgresAgentMemory/PostgresAgentMemory.ts +++ b/packages/components/nodes/memory/AgentMemory/PostgresAgentMemory/PostgresAgentMemory.ts @@ -65,6 +65,7 @@ class PostgresAgentMemory_Memory implements INode { const databaseEntities = options.databaseEntities as IDatabaseEntity const chatflowid = options.chatflowid as string const appDataSource = 
options.appDataSource as DataSource + const orgId = options.orgId as string let additionalConfiguration = {} if (additionalConfig) { @@ -101,7 +102,8 @@ class PostgresAgentMemory_Memory implements INode { threadId, appDataSource, databaseEntities, - chatflowid + chatflowid, + orgId } const recordManager = new PostgresSaver(args) return recordManager diff --git a/packages/components/nodes/memory/AgentMemory/PostgresAgentMemory/pgSaver.ts b/packages/components/nodes/memory/AgentMemory/PostgresAgentMemory/pgSaver.ts index 5bc69f663b9..59d6f720141 100644 --- a/packages/components/nodes/memory/AgentMemory/PostgresAgentMemory/pgSaver.ts +++ b/packages/components/nodes/memory/AgentMemory/PostgresAgentMemory/pgSaver.ts @@ -283,7 +283,7 @@ CREATE TABLE IF NOT EXISTS ${tableName} ( } if (returnBaseMessages) { - return await mapChatMessageToBaseMessage(chatMessage) + return await mapChatMessageToBaseMessage(chatMessage, this.config.orgId) } let returnIMessages: IMessage[] = [] diff --git a/packages/components/nodes/memory/AgentMemory/SQLiteAgentMemory/SQLiteAgentMemory.ts b/packages/components/nodes/memory/AgentMemory/SQLiteAgentMemory/SQLiteAgentMemory.ts index b301e3a8830..29c9b0a37c5 100644 --- a/packages/components/nodes/memory/AgentMemory/SQLiteAgentMemory/SQLiteAgentMemory.ts +++ b/packages/components/nodes/memory/AgentMemory/SQLiteAgentMemory/SQLiteAgentMemory.ts @@ -51,6 +51,7 @@ class SQLiteAgentMemory_Memory implements INode { const databaseEntities = options.databaseEntities as IDatabaseEntity const chatflowid = options.chatflowid as string const appDataSource = options.appDataSource as DataSource + const orgId = options.orgId as string let additionalConfiguration = {} if (additionalConfig) { @@ -76,7 +77,8 @@ class SQLiteAgentMemory_Memory implements INode { threadId, appDataSource, databaseEntities, - chatflowid + chatflowid, + orgId } const recordManager = new SqliteSaver(args) diff --git a/packages/components/nodes/memory/AgentMemory/SQLiteAgentMemory/sqliteSaver.ts b/packages/components/nodes/memory/AgentMemory/SQLiteAgentMemory/sqliteSaver.ts index 351c7896185..4ca0fa1375f 100644 --- a/packages/components/nodes/memory/AgentMemory/SQLiteAgentMemory/sqliteSaver.ts +++ b/packages/components/nodes/memory/AgentMemory/SQLiteAgentMemory/sqliteSaver.ts @@ -266,7 +266,7 @@ CREATE TABLE IF NOT EXISTS ${tableName} ( } if (returnBaseMessages) { - return await mapChatMessageToBaseMessage(chatMessage) + return await mapChatMessageToBaseMessage(chatMessage, this.config.orgId) } let returnIMessages: IMessage[] = [] diff --git a/packages/components/nodes/memory/AgentMemory/interface.ts b/packages/components/nodes/memory/AgentMemory/interface.ts index e2be67320dd..2e036cde6e9 100644 --- a/packages/components/nodes/memory/AgentMemory/interface.ts +++ b/packages/components/nodes/memory/AgentMemory/interface.ts @@ -9,6 +9,7 @@ export type SaverOptions = { appDataSource: DataSource databaseEntities: IDatabaseEntity chatflowid: string + orgId: string } export interface CheckpointTuple { diff --git a/packages/components/nodes/memory/BufferMemory/BufferMemory.ts b/packages/components/nodes/memory/BufferMemory/BufferMemory.ts index 972301daf8b..1a46e0d3ce6 100644 --- a/packages/components/nodes/memory/BufferMemory/BufferMemory.ts +++ b/packages/components/nodes/memory/BufferMemory/BufferMemory.ts @@ -61,6 +61,7 @@ class BufferMemory_Memory implements INode { const appDataSource = options.appDataSource as DataSource const databaseEntities = options.databaseEntities as IDatabaseEntity const chatflowid = 
options.chatflowid as string + const orgId = options.orgId as string return new BufferMemoryExtended({ returnMessages: true, @@ -68,7 +69,8 @@ class BufferMemory_Memory implements INode { sessionId, appDataSource, databaseEntities, - chatflowid + chatflowid, + orgId }) } } @@ -78,12 +80,14 @@ interface BufferMemoryExtendedInput { appDataSource: DataSource databaseEntities: IDatabaseEntity chatflowid: string + orgId: string } class BufferMemoryExtended extends FlowiseMemory implements MemoryMethods { appDataSource: DataSource databaseEntities: IDatabaseEntity chatflowid: string + orgId: string sessionId = '' constructor(fields: BufferMemoryInput & BufferMemoryExtendedInput) { @@ -92,6 +96,7 @@ class BufferMemoryExtended extends FlowiseMemory implements MemoryMethods { this.appDataSource = fields.appDataSource this.databaseEntities = fields.databaseEntities this.chatflowid = fields.chatflowid + this.orgId = fields.orgId } async getChatMessages( @@ -117,7 +122,7 @@ class BufferMemoryExtended extends FlowiseMemory implements MemoryMethods { } if (returnBaseMessages) { - return await mapChatMessageToBaseMessage(chatMessage) + return await mapChatMessageToBaseMessage(chatMessage, this.orgId) } let returnIMessages: IMessage[] = [] diff --git a/packages/components/nodes/memory/BufferWindowMemory/BufferWindowMemory.ts b/packages/components/nodes/memory/BufferWindowMemory/BufferWindowMemory.ts index 82cfd92916f..e63e106264f 100644 --- a/packages/components/nodes/memory/BufferWindowMemory/BufferWindowMemory.ts +++ b/packages/components/nodes/memory/BufferWindowMemory/BufferWindowMemory.ts @@ -69,6 +69,7 @@ class BufferWindowMemory_Memory implements INode { const appDataSource = options.appDataSource as DataSource const databaseEntities = options.databaseEntities as IDatabaseEntity const chatflowid = options.chatflowid as string + const orgId = options.orgId as string const obj: Partial & BufferMemoryExtendedInput = { returnMessages: true, @@ -77,7 +78,8 @@ class BufferWindowMemory_Memory implements INode { k: parseInt(k, 10), appDataSource, databaseEntities, - chatflowid + chatflowid, + orgId } return new BufferWindowMemoryExtended(obj) @@ -89,12 +91,14 @@ interface BufferMemoryExtendedInput { appDataSource: DataSource databaseEntities: IDatabaseEntity chatflowid: string + orgId: string } class BufferWindowMemoryExtended extends FlowiseWindowMemory implements MemoryMethods { appDataSource: DataSource databaseEntities: IDatabaseEntity chatflowid: string + orgId: string sessionId = '' constructor(fields: BufferWindowMemoryInput & BufferMemoryExtendedInput) { @@ -103,6 +107,7 @@ class BufferWindowMemoryExtended extends FlowiseWindowMemory implements MemoryMe this.appDataSource = fields.appDataSource this.databaseEntities = fields.databaseEntities this.chatflowid = fields.chatflowid + this.orgId = fields.orgId } async getChatMessages( @@ -134,7 +139,7 @@ class BufferWindowMemoryExtended extends FlowiseWindowMemory implements MemoryMe } if (returnBaseMessages) { - return await mapChatMessageToBaseMessage(chatMessage) + return await mapChatMessageToBaseMessage(chatMessage, this.orgId) } let returnIMessages: IMessage[] = [] diff --git a/packages/components/nodes/memory/ConversationSummaryBufferMemory/ConversationSummaryBufferMemory.ts b/packages/components/nodes/memory/ConversationSummaryBufferMemory/ConversationSummaryBufferMemory.ts index c45f93484be..f8d3c00f0cc 100644 --- a/packages/components/nodes/memory/ConversationSummaryBufferMemory/ConversationSummaryBufferMemory.ts +++ 
b/packages/components/nodes/memory/ConversationSummaryBufferMemory/ConversationSummaryBufferMemory.ts @@ -78,6 +78,7 @@ class ConversationSummaryBufferMemory_Memory implements INode { const appDataSource = options.appDataSource as DataSource const databaseEntities = options.databaseEntities as IDatabaseEntity const chatflowid = options.chatflowid as string + const orgId = options.orgId as string const obj: ConversationSummaryBufferMemoryInput & BufferMemoryExtendedInput = { llm: model, @@ -87,7 +88,8 @@ class ConversationSummaryBufferMemory_Memory implements INode { returnMessages: true, appDataSource, databaseEntities, - chatflowid + chatflowid, + orgId } return new ConversationSummaryBufferMemoryExtended(obj) @@ -99,12 +101,14 @@ interface BufferMemoryExtendedInput { appDataSource: DataSource databaseEntities: IDatabaseEntity chatflowid: string + orgId: string } class ConversationSummaryBufferMemoryExtended extends FlowiseSummaryBufferMemory implements MemoryMethods { appDataSource: DataSource databaseEntities: IDatabaseEntity chatflowid: string + orgId: string sessionId = '' constructor(fields: ConversationSummaryBufferMemoryInput & BufferMemoryExtendedInput) { @@ -113,6 +117,7 @@ class ConversationSummaryBufferMemoryExtended extends FlowiseSummaryBufferMemory this.appDataSource = fields.appDataSource this.databaseEntities = fields.databaseEntities this.chatflowid = fields.chatflowid + this.orgId = fields.orgId } async getChatMessages( @@ -137,7 +142,7 @@ class ConversationSummaryBufferMemoryExtended extends FlowiseSummaryBufferMemory chatMessage.unshift(...prependMessages) } - let baseMessages = await mapChatMessageToBaseMessage(chatMessage) + let baseMessages = await mapChatMessageToBaseMessage(chatMessage, this.orgId) // Prune baseMessages if it exceeds max token limit if (this.movingSummaryBuffer) { diff --git a/packages/components/nodes/memory/ConversationSummaryMemory/ConversationSummaryMemory.ts b/packages/components/nodes/memory/ConversationSummaryMemory/ConversationSummaryMemory.ts index 6c4d078baf7..44cb0902976 100644 --- a/packages/components/nodes/memory/ConversationSummaryMemory/ConversationSummaryMemory.ts +++ b/packages/components/nodes/memory/ConversationSummaryMemory/ConversationSummaryMemory.ts @@ -69,6 +69,7 @@ class ConversationSummaryMemory_Memory implements INode { const appDataSource = options.appDataSource as DataSource const databaseEntities = options.databaseEntities as IDatabaseEntity const chatflowid = options.chatflowid as string + const orgId = options.orgId as string const obj: ConversationSummaryMemoryInput & BufferMemoryExtendedInput = { llm: model, @@ -77,7 +78,8 @@ class ConversationSummaryMemory_Memory implements INode { sessionId, appDataSource, databaseEntities, - chatflowid + chatflowid, + orgId } return new ConversationSummaryMemoryExtended(obj) @@ -89,12 +91,14 @@ interface BufferMemoryExtendedInput { appDataSource: DataSource databaseEntities: IDatabaseEntity chatflowid: string + orgId: string } class ConversationSummaryMemoryExtended extends FlowiseSummaryMemory implements MemoryMethods { appDataSource: DataSource databaseEntities: IDatabaseEntity chatflowid: string + orgId: string sessionId = '' constructor(fields: ConversationSummaryMemoryInput & BufferMemoryExtendedInput) { @@ -103,6 +107,7 @@ class ConversationSummaryMemoryExtended extends FlowiseSummaryMemory implements this.appDataSource = fields.appDataSource this.databaseEntities = fields.databaseEntities this.chatflowid = fields.chatflowid + this.orgId = fields.orgId } async 
getChatMessages( @@ -128,7 +133,7 @@ class ConversationSummaryMemoryExtended extends FlowiseSummaryMemory implements chatMessage.unshift(...prependMessages) } - const baseMessages = await mapChatMessageToBaseMessage(chatMessage) + const baseMessages = await mapChatMessageToBaseMessage(chatMessage, this.orgId) // Get summary if (this.llm && typeof this.llm !== 'string') { diff --git a/packages/components/nodes/memory/DynamoDb/DynamoDb.ts b/packages/components/nodes/memory/DynamoDb/DynamoDb.ts index 0ad0354f46c..91e4e7298bf 100644 --- a/packages/components/nodes/memory/DynamoDb/DynamoDb.ts +++ b/packages/components/nodes/memory/DynamoDb/DynamoDb.ts @@ -125,6 +125,8 @@ const initializeDynamoDB = async (nodeData: INodeData, options: ICommonObject): config }) + const orgId = options.orgId as string + const memory = new BufferMemoryExtended({ memoryKey: memoryKey ?? 'chat_history', chatHistory: dynamoDb, @@ -132,7 +134,8 @@ const initializeDynamoDB = async (nodeData: INodeData, options: ICommonObject): dynamodbClient: client, tableName, partitionKey, - dynamoKey: { [partitionKey]: { S: sessionId } } + dynamoKey: { [partitionKey]: { S: sessionId } }, + orgId }) return memory } @@ -143,6 +146,7 @@ interface BufferMemoryExtendedInput { tableName: string partitionKey: string dynamoKey: Record + orgId: string } interface DynamoDBSerializedChatMessage { @@ -165,6 +169,7 @@ class BufferMemoryExtended extends FlowiseMemory implements MemoryMethods { private dynamoKey: Record private messageAttributeName: string sessionId = '' + orgId = '' dynamodbClient: DynamoDBClient constructor(fields: BufferMemoryInput & BufferMemoryExtendedInput) { @@ -174,6 +179,7 @@ class BufferMemoryExtended extends FlowiseMemory implements MemoryMethods { this.tableName = fields.tableName this.partitionKey = fields.partitionKey this.dynamoKey = fields.dynamoKey + this.orgId = fields.orgId } overrideDynamoKey(overrideSessionId = '') { @@ -260,7 +266,7 @@ class BufferMemoryExtended extends FlowiseMemory implements MemoryMethods { .filter((x): x is StoredMessage => x.type !== undefined && x.data.content !== undefined) const baseMessages = messages.map(mapStoredMessageToChatMessage) if (prependMessages?.length) { - baseMessages.unshift(...(await mapChatMessageToBaseMessage(prependMessages))) + baseMessages.unshift(...(await mapChatMessageToBaseMessage(prependMessages, this.orgId))) } return returnBaseMessages ? 
baseMessages : convertBaseMessagetoIMessage(baseMessages) } diff --git a/packages/components/nodes/memory/Mem0/Mem0.ts b/packages/components/nodes/memory/Mem0/Mem0.ts index ba7960163dc..9be79aabade 100644 --- a/packages/components/nodes/memory/Mem0/Mem0.ts +++ b/packages/components/nodes/memory/Mem0/Mem0.ts @@ -15,6 +15,12 @@ interface BufferMemoryExtendedInput { chatflowid: string } +interface NodeFields extends Mem0MemoryInput, Mem0MemoryExtendedInput, BufferMemoryExtendedInput { + searchOnly: boolean + useFlowiseChatId: boolean + input: string +} + class Mem0_Memory implements INode { label: string name: string @@ -143,14 +149,15 @@ class Mem0_Memory implements INode { ] } - async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { - return await initializeMem0(nodeData, options) + async init(nodeData: INodeData, input: string, options: ICommonObject): Promise { + return await initializeMem0(nodeData, input, options) } } -const initializeMem0 = async (nodeData: INodeData, options: ICommonObject): Promise => { +const initializeMem0 = async (nodeData: INodeData, input: string, options: ICommonObject): Promise => { const initialUserId = nodeData.inputs?.user_id as string const useFlowiseChatId = nodeData.inputs?.useFlowiseChatId as boolean + const orgId = options.orgId as string if (!useFlowiseChatId && !initialUserId) { throw new Error('User ID field cannot be empty when "Use Flowise Chat ID" is OFF.') @@ -183,23 +190,24 @@ const initializeMem0 = async (nodeData: INodeData, options: ICommonObject): Prom filters: (nodeData.inputs?.filters as Record) || {} } - const obj: Mem0MemoryInput & Mem0MemoryExtendedInput & BufferMemoryExtendedInput & { searchOnly: boolean; useFlowiseChatId: boolean } = - { - apiKey: apiKey, - humanPrefix: nodeData.inputs?.humanPrefix as string, - aiPrefix: nodeData.inputs?.aiPrefix as string, - inputKey: nodeData.inputs?.inputKey as string, - sessionId: constructorSessionId, - mem0Options: mem0Options, - memoryOptions: memoryOptions, - separateMessages: false, - returnMessages: false, - appDataSource: options.appDataSource as DataSource, - databaseEntities: options.databaseEntities as IDatabaseEntity, - chatflowid: options.chatflowid as string, - searchOnly: (nodeData.inputs?.searchOnly as boolean) || false, - useFlowiseChatId: useFlowiseChatId - } + const obj: NodeFields = { + apiKey: apiKey, + humanPrefix: nodeData.inputs?.humanPrefix as string, + aiPrefix: nodeData.inputs?.aiPrefix as string, + inputKey: nodeData.inputs?.inputKey as string, + sessionId: constructorSessionId, + mem0Options: mem0Options, + memoryOptions: memoryOptions, + separateMessages: false, + returnMessages: false, + appDataSource: options.appDataSource as DataSource, + databaseEntities: options.databaseEntities as IDatabaseEntity, + chatflowid: options.chatflowid as string, + searchOnly: (nodeData.inputs?.searchOnly as boolean) || false, + useFlowiseChatId: useFlowiseChatId, + input: input, + orgId: orgId + } return new Mem0MemoryExtended(obj) } @@ -207,11 +215,13 @@ const initializeMem0 = async (nodeData: INodeData, options: ICommonObject): Prom interface Mem0MemoryExtendedInput extends Mem0MemoryInput { memoryOptions?: MemoryOptions | SearchOptions useFlowiseChatId: boolean + orgId: string } class Mem0MemoryExtended extends BaseMem0Memory implements MemoryMethods { initialUserId: string userId: string + orgId: string memoryKey: string inputKey: string appDataSource: DataSource @@ -219,10 +229,9 @@ class Mem0MemoryExtended extends BaseMem0Memory implements MemoryMethods { 
chatflowid: string searchOnly: boolean useFlowiseChatId: boolean + input: string - constructor( - fields: Mem0MemoryInput & Mem0MemoryExtendedInput & BufferMemoryExtendedInput & { searchOnly: boolean; useFlowiseChatId: boolean } - ) { + constructor(fields: NodeFields) { super(fields) this.initialUserId = fields.memoryOptions?.user_id ?? '' this.userId = this.initialUserId @@ -233,6 +242,8 @@ class Mem0MemoryExtended extends BaseMem0Memory implements MemoryMethods { this.chatflowid = fields.chatflowid this.searchOnly = fields.searchOnly this.useFlowiseChatId = fields.useFlowiseChatId + this.input = fields.input + this.orgId = fields.orgId } // Selects Mem0 user_id based on toggle state (Flowise chat ID or input field) @@ -318,11 +329,16 @@ class Mem0MemoryExtended extends BaseMem0Memory implements MemoryMethods { if (prependMessages?.length) { returnIMessages.unshift(...prependMessages) // Reverted to original simpler unshift - chatMessage.unshift(...(prependMessages as any)) // Cast as any + chatMessage.unshift(...(prependMessages as any)) } if (returnBaseMessages) { - const memoryVariables = await this.loadMemoryVariables({}, overrideUserId) + const memoryVariables = await this.loadMemoryVariables( + { + [this.inputKey]: this.input ?? '' + }, + overrideUserId + ) const mem0History = memoryVariables[this.memoryKey] if (mem0History && typeof mem0History === 'string') { @@ -337,7 +353,7 @@ class Mem0MemoryExtended extends BaseMem0Memory implements MemoryMethods { console.warn('Mem0 history is not a string, cannot prepend directly.') } - return await mapChatMessageToBaseMessage(chatMessage) + return await mapChatMessageToBaseMessage(chatMessage, this.orgId) } return returnIMessages diff --git a/packages/components/nodes/memory/MongoDBMemory/MongoDBMemory.ts b/packages/components/nodes/memory/MongoDBMemory/MongoDBMemory.ts index df70c49497a..0fad704ef00 100644 --- a/packages/components/nodes/memory/MongoDBMemory/MongoDBMemory.ts +++ b/packages/components/nodes/memory/MongoDBMemory/MongoDBMemory.ts @@ -88,9 +88,12 @@ const initializeMongoDB = async (nodeData: INodeData, options: ICommonObject): P const mongoDBConnectUrl = getCredentialParam('mongoDBConnectUrl', credentialData, nodeData) const driverInfo = { name: 'Flowise', version: (await getVersion()).version } + const orgId = options.orgId as string + return new BufferMemoryExtended({ memoryKey: memoryKey ?? 
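Editor's note: besides threading `orgId`, the Mem0 hunks above make two further changes: the ad-hoc intersection type is consolidated into a single `NodeFields` interface, and the user's current question is captured at `init(nodeData, input, options)` time so that `loadMemoryVariables` is seeded with `{ [inputKey]: input }` instead of an empty object when base messages are requested. A rough sketch of the latter, using a reduced stand-in for `Mem0MemoryExtended`:

```typescript
// Reduced stand-in showing how the stored input now seeds the Mem0 lookup.
interface Mem0Like {
    inputKey: string
    input: string // captured from init(nodeData, input, options) in this PR
    memoryKey: string
    loadMemoryVariables: (values: Record<string, string>, overrideUserId?: string) => Promise<Record<string, unknown>>
}

async function loadMem0History(memory: Mem0Like, overrideUserId = ''): Promise<string | undefined> {
    // Previously this was loadMemoryVariables({}, overrideUserId); passing the current
    // input lets Mem0 retrieve memories relevant to the question being asked.
    const memoryVariables = await memory.loadMemoryVariables({ [memory.inputKey]: memory.input ?? '' }, overrideUserId)
    const history = memoryVariables[memory.memoryKey]
    return typeof history === 'string' ? history : undefined
}
```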
'chat_history', sessionId, + orgId, mongoConnection: { databaseName, collectionName, @@ -102,6 +105,7 @@ const initializeMongoDB = async (nodeData: INodeData, options: ICommonObject): P interface BufferMemoryExtendedInput { sessionId: string + orgId: string mongoConnection: { databaseName: string collectionName: string @@ -112,6 +116,7 @@ interface BufferMemoryExtendedInput { class BufferMemoryExtended extends FlowiseMemory implements MemoryMethods { sessionId = '' + orgId = '' mongoConnection: { databaseName: string collectionName: string @@ -122,6 +127,7 @@ class BufferMemoryExtended extends FlowiseMemory implements MemoryMethods { constructor(fields: BufferMemoryInput & BufferMemoryExtendedInput) { super(fields) this.sessionId = fields.sessionId + this.orgId = fields.orgId this.mongoConnection = fields.mongoConnection } @@ -138,7 +144,7 @@ class BufferMemoryExtended extends FlowiseMemory implements MemoryMethods { const messages = document?.messages || [] const baseMessages = messages.map(mapStoredMessageToChatMessage) if (prependMessages?.length) { - baseMessages.unshift(...(await mapChatMessageToBaseMessage(prependMessages))) + baseMessages.unshift(...(await mapChatMessageToBaseMessage(prependMessages, this.orgId))) } await client.close() diff --git a/packages/components/nodes/memory/RedisBackedChatMemory/RedisBackedChatMemory.ts b/packages/components/nodes/memory/RedisBackedChatMemory/RedisBackedChatMemory.ts index e1813fae7be..757cb68f2cf 100644 --- a/packages/components/nodes/memory/RedisBackedChatMemory/RedisBackedChatMemory.ts +++ b/packages/components/nodes/memory/RedisBackedChatMemory/RedisBackedChatMemory.ts @@ -88,6 +88,7 @@ const initializeRedis = async (nodeData: INodeData, options: ICommonObject): Pro const credentialData = await getCredentialData(nodeData.credential ?? '', options) const redisUrl = getCredentialParam('redisUrl', credentialData, nodeData) + const orgId = options.orgId as string const redisOptions = redisUrl ? redisUrl @@ -104,7 +105,8 @@ const initializeRedis = async (nodeData: INodeData, options: ICommonObject): Pro sessionId, windowSize, sessionTTL, - redisOptions + redisOptions, + orgId }) return memory @@ -114,11 +116,13 @@ interface BufferMemoryExtendedInput { sessionId: string windowSize?: number sessionTTL?: number + orgId: string redisOptions: RedisOptions | string } class BufferMemoryExtended extends FlowiseMemory implements MemoryMethods { sessionId = '' + orgId = '' windowSize?: number sessionTTL?: number redisOptions: RedisOptions | string @@ -128,6 +132,7 @@ class BufferMemoryExtended extends FlowiseMemory implements MemoryMethods { this.sessionId = fields.sessionId this.windowSize = fields.windowSize this.sessionTTL = fields.sessionTTL + this.orgId = fields.orgId this.redisOptions = fields.redisOptions } @@ -165,7 +170,7 @@ class BufferMemoryExtended extends FlowiseMemory implements MemoryMethods { const orderedMessages = rawStoredMessages.reverse().map((message) => JSON.parse(message)) const baseMessages = orderedMessages.map(mapStoredMessageToChatMessage) if (prependMessages?.length) { - baseMessages.unshift(...(await mapChatMessageToBaseMessage(prependMessages))) + baseMessages.unshift(...(await mapChatMessageToBaseMessage(prependMessages, this.orgId))) } return returnBaseMessages ? 
baseMessages : convertBaseMessagetoIMessage(baseMessages) }) diff --git a/packages/components/nodes/memory/UpstashRedisBackedChatMemory/UpstashRedisBackedChatMemory.ts b/packages/components/nodes/memory/UpstashRedisBackedChatMemory/UpstashRedisBackedChatMemory.ts index 5c3d74dd5b8..3dd7ea5026b 100644 --- a/packages/components/nodes/memory/UpstashRedisBackedChatMemory/UpstashRedisBackedChatMemory.ts +++ b/packages/components/nodes/memory/UpstashRedisBackedChatMemory/UpstashRedisBackedChatMemory.ts @@ -100,13 +100,14 @@ const initalizeUpstashRedis = async (nodeData: INodeData, options: ICommonObject sessionTTL, client }) - + const orgId = options.orgId as string const memory = new BufferMemoryExtended({ memoryKey: memoryKey ?? 'chat_history', chatHistory: redisChatMessageHistory, sessionId, sessionTTL, - redisClient: client + redisClient: client, + orgId }) return memory @@ -115,11 +116,13 @@ const initalizeUpstashRedis = async (nodeData: INodeData, options: ICommonObject interface BufferMemoryExtendedInput { redisClient: Redis sessionId: string + orgId: string sessionTTL?: number } class BufferMemoryExtended extends FlowiseMemory implements MemoryMethods { sessionId = '' + orgId = '' redisClient: Redis sessionTTL?: number @@ -128,6 +131,7 @@ class BufferMemoryExtended extends FlowiseMemory implements MemoryMethods { this.sessionId = fields.sessionId this.redisClient = fields.redisClient this.sessionTTL = fields.sessionTTL + this.orgId = fields.orgId } async getChatMessages( @@ -143,7 +147,7 @@ class BufferMemoryExtended extends FlowiseMemory implements MemoryMethods { const previousMessages = orderedMessages.filter((x): x is StoredMessage => x.type !== undefined && x.data.content !== undefined) const baseMessages = previousMessages.map(mapStoredMessageToChatMessage) if (prependMessages?.length) { - baseMessages.unshift(...(await mapChatMessageToBaseMessage(prependMessages))) + baseMessages.unshift(...(await mapChatMessageToBaseMessage(prependMessages, this.orgId))) } return returnBaseMessages ? baseMessages : convertBaseMessagetoIMessage(baseMessages) } diff --git a/packages/components/nodes/memory/ZepMemory/ZepMemory.ts b/packages/components/nodes/memory/ZepMemory/ZepMemory.ts index 7832d0ff526..a49d3eb4e31 100644 --- a/packages/components/nodes/memory/ZepMemory/ZepMemory.ts +++ b/packages/components/nodes/memory/ZepMemory/ZepMemory.ts @@ -119,6 +119,7 @@ const initializeZep = async (nodeData: INodeData, options: ICommonObject): Promi const credentialData = await getCredentialData(nodeData.credential ?? '', options) const apiKey = getCredentialParam('apiKey', credentialData, nodeData) + const orgId = options.orgId as string const obj: ZepMemoryInput & ZepMemoryExtendedInput = { baseURL, aiPrefix, @@ -127,6 +128,7 @@ const initializeZep = async (nodeData: INodeData, options: ICommonObject): Promi memoryKey, inputKey, sessionId, + orgId, k: k ? 
parseInt(k, 10) : undefined } if (apiKey) obj.apiKey = apiKey @@ -136,14 +138,17 @@ const initializeZep = async (nodeData: INodeData, options: ICommonObject): Promi interface ZepMemoryExtendedInput { k?: number + orgId: string } class ZepMemoryExtended extends ZepMemory implements MemoryMethods { lastN?: number + orgId = '' constructor(fields: ZepMemoryInput & ZepMemoryExtendedInput) { super(fields) this.lastN = fields.k + this.orgId = fields.orgId } async loadMemoryVariables(values: InputValues, overrideSessionId = ''): Promise { @@ -176,7 +181,7 @@ class ZepMemoryExtended extends ZepMemory implements MemoryMethods { const memoryVariables = await this.loadMemoryVariables({}, id) const baseMessages = memoryVariables[this.memoryKey] if (prependMessages?.length) { - baseMessages.unshift(...(await mapChatMessageToBaseMessage(prependMessages))) + baseMessages.unshift(...(await mapChatMessageToBaseMessage(prependMessages, this.orgId))) } return returnBaseMessages ? baseMessages : convertBaseMessagetoIMessage(baseMessages) } diff --git a/packages/components/nodes/memory/ZepMemoryCloud/ZepMemoryCloud.ts b/packages/components/nodes/memory/ZepMemoryCloud/ZepMemoryCloud.ts index 237e026c721..d6068b01ee7 100644 --- a/packages/components/nodes/memory/ZepMemoryCloud/ZepMemoryCloud.ts +++ b/packages/components/nodes/memory/ZepMemoryCloud/ZepMemoryCloud.ts @@ -113,6 +113,7 @@ const initializeZep = async (nodeData: INodeData, options: ICommonObject): Promi const credentialData = await getCredentialData(nodeData.credential ?? '', options) const apiKey = getCredentialParam('apiKey', credentialData, nodeData) + const orgId = options.orgId as string const obj: ZepMemoryInput & ZepMemoryExtendedInput = { apiKey, aiPrefix, @@ -121,7 +122,8 @@ const initializeZep = async (nodeData: INodeData, options: ICommonObject): Promi sessionId, inputKey, memoryType: memoryType, - returnMessages: true + returnMessages: true, + orgId } return new ZepMemoryExtended(obj) @@ -129,14 +131,17 @@ const initializeZep = async (nodeData: INodeData, options: ICommonObject): Promi interface ZepMemoryExtendedInput { memoryType?: 'perpetual' | 'message_window' + orgId: string } class ZepMemoryExtended extends ZepMemory implements MemoryMethods { memoryType: 'perpetual' | 'message_window' + orgId: string constructor(fields: ZepMemoryInput & ZepMemoryExtendedInput) { super(fields) this.memoryType = fields.memoryType ?? 'perpetual' + this.orgId = fields.orgId } async loadMemoryVariables(values: InputValues, overrideSessionId = ''): Promise { @@ -169,7 +174,7 @@ class ZepMemoryExtended extends ZepMemory implements MemoryMethods { const memoryVariables = await this.loadMemoryVariables({}, id) const baseMessages = memoryVariables[this.memoryKey] if (prependMessages?.length) { - baseMessages.unshift(...(await mapChatMessageToBaseMessage(prependMessages))) + baseMessages.unshift(...(await mapChatMessageToBaseMessage(prependMessages, this.orgId))) } return returnBaseMessages ? baseMessages : convertBaseMessagetoIMessage(baseMessages) } diff --git a/packages/components/nodes/multiagents/Worker/Worker.ts b/packages/components/nodes/multiagents/Worker/Worker.ts index 5651135a14d..06a5d059cc2 100644 --- a/packages/components/nodes/multiagents/Worker/Worker.ts +++ b/packages/components/nodes/multiagents/Worker/Worker.ts @@ -233,7 +233,7 @@ async function createAgent( sessionId: flowObj?.sessionId, chatId: flowObj?.chatId, input: flowObj?.input, - verbose: process.env.DEBUG === 'true', + verbose: process.env.DEBUG === 'true' ? 
true : false, maxIterations: maxIterations ? parseFloat(maxIterations) : undefined }) return executor diff --git a/packages/components/nodes/prompts/ChatPromptTemplate/ChatPromptTemplate.ts b/packages/components/nodes/prompts/ChatPromptTemplate/ChatPromptTemplate.ts index d3faf712774..5cde62c437a 100644 --- a/packages/components/nodes/prompts/ChatPromptTemplate/ChatPromptTemplate.ts +++ b/packages/components/nodes/prompts/ChatPromptTemplate/ChatPromptTemplate.ts @@ -120,7 +120,7 @@ class ChatPromptTemplate_Prompts implements INode { ) { const appDataSource = options.appDataSource as DataSource const databaseEntities = options.databaseEntities as IDatabaseEntity - const vm = await getVM(appDataSource, databaseEntities, nodeData, {}) + const vm = await getVM(appDataSource, databaseEntities, nodeData, options, {}) try { const response = await vm.run(`module.exports = async function() {${messageHistoryCode}}()`, __dirname) if (!Array.isArray(response)) throw new Error('Returned message history must be an array') diff --git a/packages/components/nodes/recordmanager/PostgresRecordManager/PostgresRecordManager.ts b/packages/components/nodes/recordmanager/PostgresRecordManager/PostgresRecordManager.ts index ebb429af0ac..cf239522f5b 100644 --- a/packages/components/nodes/recordmanager/PostgresRecordManager/PostgresRecordManager.ts +++ b/packages/components/nodes/recordmanager/PostgresRecordManager/PostgresRecordManager.ts @@ -227,8 +227,6 @@ class PostgresRecordManager implements RecordManagerInterface { const queryRunner = dataSource.createQueryRunner() const tableName = this.sanitizeTableName(this.tableName) - await queryRunner.query('CREATE EXTENSION IF NOT EXISTS pgcrypto;') - await queryRunner.manager.query(` CREATE TABLE IF NOT EXISTS "${tableName}" ( uuid UUID PRIMARY KEY DEFAULT gen_random_uuid(), @@ -260,9 +258,9 @@ class PostgresRecordManager implements RecordManagerInterface { const dataSource = await this.getDataSource() try { const queryRunner = dataSource.createQueryRunner() - const res = await queryRunner.manager.query('SELECT EXTRACT(EPOCH FROM CURRENT_TIMESTAMP) AS now') + const res = await queryRunner.manager.query('SELECT EXTRACT(EPOCH FROM CURRENT_TIMESTAMP) AS extract') await queryRunner.release() - return Number.parseFloat(res[0].now) + return Number.parseFloat(res[0].extract) } catch (error) { console.error('Error getting time in PostgresRecordManager:') throw error diff --git a/packages/components/nodes/sequentialagents/Agent/Agent.ts b/packages/components/nodes/sequentialagents/Agent/Agent.ts index ad626207429..cb4e03e7e15 100644 --- a/packages/components/nodes/sequentialagents/Agent/Agent.ts +++ b/packages/components/nodes/sequentialagents/Agent/Agent.ts @@ -680,7 +680,7 @@ async function createAgent( sessionId: flowObj?.sessionId, chatId: flowObj?.chatId, input: flowObj?.input, - verbose: process.env.DEBUG === 'true', + verbose: process.env.DEBUG === 'true' ? true : false, maxIterations: maxIterations ? parseFloat(maxIterations) : undefined }) return executor @@ -877,7 +877,7 @@ const getReturnOutput = async (nodeData: INodeData, input: string, options: ICom const updateStateMemory = nodeData.inputs?.updateStateMemory as string const selectedTab = tabIdentifier ? 
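Editor's note: in the PostgresRecordManager hunks above, the `CREATE EXTENSION IF NOT EXISTS pgcrypto` step is dropped (presumably because `gen_random_uuid()` is built into PostgreSQL 13 and later, so the extension is only needed on older servers), and the epoch query's column alias is renamed so it matches the property that is read back. A sketch of the adjusted time lookup:

```typescript
// Sketch of the adjusted getTime(): the SQL alias and the property read must agree,
// and this PR renames both from "now" to "extract".
async function getTime(queryRunner: { manager: { query: (sql: string) => Promise<any[]> } }): Promise<number> {
    const res = await queryRunner.manager.query('SELECT EXTRACT(EPOCH FROM CURRENT_TIMESTAMP) AS extract')
    return Number.parseFloat(res[0].extract) // seconds since epoch, with fractional part
}
```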
tabIdentifier.split(`_${nodeData.id}`)[0] : 'updateStateMemoryUI' - const variables = await getVars(appDataSource, databaseEntities, nodeData) + const variables = await getVars(appDataSource, databaseEntities, nodeData, options) const flow = { chatflowId: options.chatflowid, @@ -930,7 +930,7 @@ const getReturnOutput = async (nodeData: INodeData, input: string, options: ICom throw new Error(e) } } else if (selectedTab === 'updateStateMemoryCode' && updateStateMemoryCode) { - const vm = await getVM(appDataSource, databaseEntities, nodeData, flow) + const vm = await getVM(appDataSource, databaseEntities, nodeData, options, flow) try { const response = await vm.run(`module.exports = async function() {${updateStateMemoryCode}}()`, __dirname) if (typeof response !== 'object') throw new Error('Return output must be an object') diff --git a/packages/components/nodes/sequentialagents/Condition/Condition.ts b/packages/components/nodes/sequentialagents/Condition/Condition.ts index 1455c17b1a7..1f855091fc9 100644 --- a/packages/components/nodes/sequentialagents/Condition/Condition.ts +++ b/packages/components/nodes/sequentialagents/Condition/Condition.ts @@ -267,7 +267,7 @@ const runCondition = async (nodeData: INodeData, input: string, options: ICommon const tabIdentifier = nodeData.inputs?.[`${TAB_IDENTIFIER}_${nodeData.id}`] as string const selectedTab = tabIdentifier ? tabIdentifier.split(`_${nodeData.id}`)[0] : 'conditionUI' - const variables = await getVars(appDataSource, databaseEntities, nodeData) + const variables = await getVars(appDataSource, databaseEntities, nodeData, options) const flow = { chatflowId: options.chatflowid, @@ -279,7 +279,7 @@ const runCondition = async (nodeData: INodeData, input: string, options: ICommon } if (selectedTab === 'conditionFunction' && conditionFunction) { - const vm = await getVM(appDataSource, databaseEntities, nodeData, flow) + const vm = await getVM(appDataSource, databaseEntities, nodeData, options, flow) try { const response = await vm.run(`module.exports = async function() {${conditionFunction}}()`, __dirname) if (typeof response !== 'string') throw new Error('Condition function must return a string') diff --git a/packages/components/nodes/sequentialagents/ConditionAgent/ConditionAgent.ts b/packages/components/nodes/sequentialagents/ConditionAgent/ConditionAgent.ts index ae56efe129b..3c1411c4f16 100644 --- a/packages/components/nodes/sequentialagents/ConditionAgent/ConditionAgent.ts +++ b/packages/components/nodes/sequentialagents/ConditionAgent/ConditionAgent.ts @@ -540,7 +540,7 @@ const runCondition = async ( result = { ...jsonResult, additional_kwargs: { nodeId: nodeData.id } } } - const variables = await getVars(appDataSource, databaseEntities, nodeData) + const variables = await getVars(appDataSource, databaseEntities, nodeData, options) const flow = { chatflowId: options.chatflowid, @@ -553,7 +553,7 @@ const runCondition = async ( } if (selectedTab === 'conditionFunction' && conditionFunction) { - const vm = await getVM(appDataSource, databaseEntities, nodeData, flow) + const vm = await getVM(appDataSource, databaseEntities, nodeData, options, flow) try { const response = await vm.run(`module.exports = async function() {${conditionFunction}}()`, __dirname) if (typeof response !== 'string') throw new Error('Condition function must return a string') diff --git a/packages/components/nodes/sequentialagents/CustomFunction/CustomFunction.ts b/packages/components/nodes/sequentialagents/CustomFunction/CustomFunction.ts index b7d831e17d0..493102b0bd2 
100644 --- a/packages/components/nodes/sequentialagents/CustomFunction/CustomFunction.ts +++ b/packages/components/nodes/sequentialagents/CustomFunction/CustomFunction.ts @@ -102,7 +102,7 @@ class CustomFunction_SeqAgents implements INode { if (!sequentialNodes || !sequentialNodes.length) throw new Error('Custom function must have a predecessor!') const executeFunc = async (state: ISeqAgentsState) => { - const variables = await getVars(appDataSource, databaseEntities, nodeData) + const variables = await getVars(appDataSource, databaseEntities, nodeData, options) const flow = { chatflowId: options.chatflowid, sessionId: options.sessionId, diff --git a/packages/components/nodes/sequentialagents/ExecuteFlow/ExecuteFlow.ts b/packages/components/nodes/sequentialagents/ExecuteFlow/ExecuteFlow.ts index 479ebfbb48a..919b7e7a2f4 100644 --- a/packages/components/nodes/sequentialagents/ExecuteFlow/ExecuteFlow.ts +++ b/packages/components/nodes/sequentialagents/ExecuteFlow/ExecuteFlow.ts @@ -141,7 +141,8 @@ class ExecuteFlow_SeqAgents implements INode { return returnData } - const chatflows = await appDataSource.getRepository(databaseEntities['ChatFlow']).find() + const searchOptions = options.searchOptions || {} + const chatflows = await appDataSource.getRepository(databaseEntities['ChatFlow']).findBy(searchOptions) for (let i = 0; i < chatflows.length; i += 1) { const data = { @@ -189,7 +190,7 @@ class ExecuteFlow_SeqAgents implements INode { const chatId = options.chatId const executeFunc = async (state: ISeqAgentsState) => { - const variables = await getVars(appDataSource, databaseEntities, nodeData) + const variables = await getVars(appDataSource, databaseEntities, nodeData, options) let flowInput = '' if (seqExecuteFlowInput === 'userQuestion') { @@ -223,7 +224,7 @@ class ExecuteFlow_SeqAgents implements INode { } } - const options = { + const callOptions = { method: 'POST', headers: { 'Content-Type': 'application/json', @@ -234,7 +235,7 @@ class ExecuteFlow_SeqAgents implements INode { let sandbox: ICommonObject = { $input: flowInput, - $callOptions: options, + $callOptions: callOptions, $callBody: body, util: undefined, Symbol: undefined, diff --git a/packages/components/nodes/sequentialagents/LLMNode/LLMNode.ts b/packages/components/nodes/sequentialagents/LLMNode/LLMNode.ts index 371a8986ff0..f2c31ac90fb 100644 --- a/packages/components/nodes/sequentialagents/LLMNode/LLMNode.ts +++ b/packages/components/nodes/sequentialagents/LLMNode/LLMNode.ts @@ -668,7 +668,7 @@ const getReturnOutput = async (nodeData: INodeData, input: string, options: ICom const updateStateMemory = nodeData.inputs?.updateStateMemory as string const selectedTab = tabIdentifier ? 
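Editor's note: two recurring cleanups show up in the ExecuteFlow hunk above and again in ChatflowTool, CustomTool and DocStoreVector further down: listings switch from an unscoped `find()` to `findBy(options.searchOptions || {})`, and ExecuteFlow's local fetch config is renamed from `options` to `callOptions` so it no longer shadows the node's `options: ICommonObject` parameter. A simplified TypeORM-style sketch of the scoped listing (the entity and filter shape are placeholders, not the actual Flowise entities):

```typescript
import { DataSource } from 'typeorm'

// Hypothetical listing helper mirroring the findBy(searchOptions) pattern above.
// In the real nodes the entity comes from databaseEntities['ChatFlow'] and
// searchOptions is injected by the server via options.searchOptions.
async function listChatflows(
    appDataSource: DataSource,
    chatFlowEntity: any,
    searchOptions: Record<string, any> = {}
): Promise<any[]> {
    // findBy() applies the filter as a WHERE clause, so only rows matching the
    // caller-supplied scope are returned instead of every chatflow in the database.
    return appDataSource.getRepository(chatFlowEntity).findBy(searchOptions)
}
```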
tabIdentifier.split(`_${nodeData.id}`)[0] : 'updateStateMemoryUI' - const variables = await getVars(appDataSource, databaseEntities, nodeData) + const variables = await getVars(appDataSource, databaseEntities, nodeData, options) const flow = { chatflowId: options.chatflowid, @@ -721,7 +721,7 @@ const getReturnOutput = async (nodeData: INodeData, input: string, options: ICom throw new Error(e) } } else if (selectedTab === 'updateStateMemoryCode' && updateStateMemoryCode) { - const vm = await getVM(appDataSource, databaseEntities, nodeData, flow) + const vm = await getVM(appDataSource, databaseEntities, nodeData, options, flow) try { const response = await vm.run(`module.exports = async function() {${updateStateMemoryCode}}()`, __dirname) if (typeof response !== 'object') throw new Error('Return output must be an object') diff --git a/packages/components/nodes/sequentialagents/State/State.ts b/packages/components/nodes/sequentialagents/State/State.ts index 5c48cb7a782..92e0769d5a5 100644 --- a/packages/components/nodes/sequentialagents/State/State.ts +++ b/packages/components/nodes/sequentialagents/State/State.ts @@ -190,7 +190,7 @@ class State_SeqAgents implements INode { throw new Error(e) } } else if (selectedTab === 'stateMemoryCode' && stateMemoryCode) { - const variables = await getVars(appDataSource, databaseEntities, nodeData) + const variables = await getVars(appDataSource, databaseEntities, nodeData, options) const flow = { chatflowId: options.chatflowid, sessionId: options.sessionId, diff --git a/packages/components/nodes/sequentialagents/ToolNode/ToolNode.ts b/packages/components/nodes/sequentialagents/ToolNode/ToolNode.ts index fda82aec95b..cc6a260cf8b 100644 --- a/packages/components/nodes/sequentialagents/ToolNode/ToolNode.ts +++ b/packages/components/nodes/sequentialagents/ToolNode/ToolNode.ts @@ -498,7 +498,7 @@ const getReturnOutput = async ( const updateStateMemory = nodeData.inputs?.updateStateMemory as string const selectedTab = tabIdentifier ? 
tabIdentifier.split(`_${nodeData.id}`)[0] : 'updateStateMemoryUI' - const variables = await getVars(appDataSource, databaseEntities, nodeData) + const variables = await getVars(appDataSource, databaseEntities, nodeData, options) const reformattedOutput = outputs.map((output) => { return { @@ -561,7 +561,7 @@ const getReturnOutput = async ( throw new Error(e) } } else if (selectedTab === 'updateStateMemoryCode' && updateStateMemoryCode) { - const vm = await getVM(appDataSource, databaseEntities, nodeData, flow) + const vm = await getVM(appDataSource, databaseEntities, nodeData, options, flow) try { const response = await vm.run(`module.exports = async function() {${updateStateMemoryCode}}()`, __dirname) if (typeof response !== 'object') throw new Error('Return output must be an object') diff --git a/packages/components/nodes/sequentialagents/commonUtils.ts b/packages/components/nodes/sequentialagents/commonUtils.ts index 3fe298f7f04..5769da816d6 100644 --- a/packages/components/nodes/sequentialagents/commonUtils.ts +++ b/packages/components/nodes/sequentialagents/commonUtils.ts @@ -150,8 +150,14 @@ export const processImageMessage = async (llm: BaseChatModel, nodeData: INodeDat return multiModalMessageContent } -export const getVM = async (appDataSource: DataSource, databaseEntities: IDatabaseEntity, nodeData: INodeData, flow: ICommonObject) => { - const variables = await getVars(appDataSource, databaseEntities, nodeData) +export const getVM = async ( + appDataSource: DataSource, + databaseEntities: IDatabaseEntity, + nodeData: INodeData, + options: ICommonObject, + flow: ICommonObject +) => { + const variables = await getVars(appDataSource, databaseEntities, nodeData, options) let sandbox: any = { util: undefined, @@ -420,7 +426,7 @@ export const checkMessageHistory = async ( if (messageHistory) { const appDataSource = options.appDataSource as DataSource const databaseEntities = options.databaseEntities as IDatabaseEntity - const vm = await getVM(appDataSource, databaseEntities, nodeData, {}) + const vm = await getVM(appDataSource, databaseEntities, nodeData, options, {}) try { const response = await vm.run(`module.exports = async function() {${messageHistory}}()`, __dirname) if (!Array.isArray(response)) throw new Error('Returned message history must be an array') diff --git a/packages/components/nodes/tools/ChainTool/core.ts b/packages/components/nodes/tools/ChainTool/core.ts index 1848b81caca..e43c126f8d5 100644 --- a/packages/components/nodes/tools/ChainTool/core.ts +++ b/packages/components/nodes/tools/ChainTool/core.ts @@ -14,17 +14,41 @@ export class ChainTool extends DynamicTool { super({ ...rest, func: async (input, runManager) => { - const childManagers = runManager?.getChild() - const handlers = childManagers?.handlers?.filter((handler) => !(handler instanceof CustomChainHandler)) || [] - if (childManagers) childManagers.handlers = handlers + // prevent sending SSE events of the sub-chain + const sseStreamer = runManager?.handlers.find((handler) => handler instanceof CustomChainHandler)?.sseStreamer + if (runManager) { + const callbacks = runManager.handlers + for (let i = 0; i < callbacks.length; i += 1) { + if (callbacks[i] instanceof CustomChainHandler) { + ;(callbacks[i] as any).sseStreamer = undefined + } + } + } if ((chain as any).prompt && (chain as any).prompt.promptValues) { const promptValues = handleEscapeCharacters((chain as any).prompt.promptValues, true) - const values = await chain.call(promptValues, childManagers) + + const values = await chain.call(promptValues, 
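Editor's note: the `commonUtils.ts` hunk above contains the signature change that all of the earlier call-site edits follow: `getVM` now accepts the node's `options: ICommonObject` and forwards it to `getVars`, so variable resolution gets request-scoped context (what `getVars` does with `options` is outside this diff). A call-shape sketch of an updated caller, with the helper injected as a parameter rather than imported so the example stays self-contained:

```typescript
// Call-shape sketch only; getVM is the real helper from sequentialagents/commonUtils.
type GetVM = (
    appDataSource: any,
    databaseEntities: any,
    nodeData: any,
    options: any,
    flow: any
) => Promise<{ run: (code: string, dirname: string) => Promise<any> }>

async function runUpdateStateMemoryCode(
    getVM: GetVM,
    deps: { appDataSource: any; databaseEntities: any; nodeData: any; options: any; flow: any },
    updateStateMemoryCode: string
) {
    // `options` is the new 4th argument; every caller in this PR was updated to pass it.
    const vm = await getVM(deps.appDataSource, deps.databaseEntities, deps.nodeData, deps.options, deps.flow)
    const response = await vm.run(`module.exports = async function() {${updateStateMemoryCode}}()`, __dirname)
    if (typeof response !== 'object') throw new Error('Return output must be an object')
    return response
}
```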
runManager?.getChild()) + if (runManager && sseStreamer) { + const callbacks = runManager.handlers + for (let i = 0; i < callbacks.length; i += 1) { + if (callbacks[i] instanceof CustomChainHandler) { + ;(callbacks[i] as any).sseStreamer = sseStreamer + } + } + } return values?.text } - const values = chain.run(input, childManagers) + const values = chain.run(input, runManager?.getChild()) + if (runManager && sseStreamer) { + const callbacks = runManager.handlers + for (let i = 0; i < callbacks.length; i += 1) { + if (callbacks[i] instanceof CustomChainHandler) { + ;(callbacks[i] as any).sseStreamer = sseStreamer + } + } + } return values } }) diff --git a/packages/components/nodes/tools/ChatflowTool/ChatflowTool.ts b/packages/components/nodes/tools/ChatflowTool/ChatflowTool.ts index c22e0f35c48..e473357f8ea 100644 --- a/packages/components/nodes/tools/ChatflowTool/ChatflowTool.ts +++ b/packages/components/nodes/tools/ChatflowTool/ChatflowTool.ts @@ -122,7 +122,8 @@ class ChatflowTool_Tools implements INode { return returnData } - const chatflows = await appDataSource.getRepository(databaseEntities['ChatFlow']).find() + const searchOptions = options.searchOptions || {} + const chatflows = await appDataSource.getRepository(databaseEntities['ChatFlow']).findBy(searchOptions) for (let i = 0; i < chatflows.length; i += 1) { const data = { diff --git a/packages/components/nodes/tools/CodeInterpreterE2B/CodeInterpreterE2B.ts b/packages/components/nodes/tools/CodeInterpreterE2B/CodeInterpreterE2B.ts index 8c3967dc927..8d60a823a2a 100644 --- a/packages/components/nodes/tools/CodeInterpreterE2B/CodeInterpreterE2B.ts +++ b/packages/components/nodes/tools/CodeInterpreterE2B/CodeInterpreterE2B.ts @@ -80,7 +80,8 @@ class Code_Interpreter_Tools implements INode { schema: z.object({ input: z.string().describe('Python code to be executed in the sandbox environment') }), - chatflowid: options.chatflowid + chatflowid: options.chatflowid, + orgId: options.orgId }) } } @@ -92,6 +93,7 @@ type E2BToolInput = { apiKey: string schema: any chatflowid: string + orgId: string templateCodeInterpreterE2B?: string domainCodeInterpreterE2B?: string } @@ -113,6 +115,8 @@ export class E2BTool extends StructuredTool { chatflowid: string + orgId: string + flowObj: ICommonObject templateCodeInterpreterE2B?: string @@ -125,6 +129,7 @@ export class E2BTool extends StructuredTool { this.apiKey = options.apiKey this.schema = options.schema this.chatflowid = options.chatflowid + this.orgId = options.orgId this.templateCodeInterpreterE2B = options.templateCodeInterpreterE2B this.domainCodeInterpreterE2B = options.domainCodeInterpreterE2B } @@ -136,6 +141,7 @@ export class E2BTool extends StructuredTool { apiKey: options.apiKey, schema: options.schema, chatflowid: options.chatflowid, + orgId: options.orgId, templateCodeInterpreterE2B: options.templateCodeInterpreterE2B, domainCodeInterpreterE2B: options.domainCodeInterpreterE2B }) @@ -212,28 +218,33 @@ export class E2BTool extends StructuredTool { const filename = `artifact_${Date.now()}.png` - const res = await addSingleFileToStorage( + // Don't check storage usage because this is incoming file, and if we throw error, agent will keep on retrying + const { path } = await addSingleFileToStorage( 'image/png', pngData, filename, + this.orgId, this.chatflowid, flowConfig!.chatId as string ) - artifacts.push({ type: 'png', data: res }) + + artifacts.push({ type: 'png', data: path }) } else if (key === 'jpeg') { //@ts-ignore const jpegData = Buffer.from(result.jpeg, 'base64') const 
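Editor's note: the ChainTool change above replaces the old approach (dropping `CustomChainHandler` from the child callbacks entirely) with temporarily clearing the handler's `sseStreamer` while the sub-chain runs and restoring it afterwards, so the sub-chain still fires its callbacks but does not stream its own tokens to the client. A condensed sketch of that pattern; the handler and run manager types are simplified stand-ins, and the `try/finally` is a sketch-level safety net (the PR restores the streamer right after the sub-chain call):

```typescript
// Condensed sketch of the "mute the streamer during the sub-chain" pattern used above.
class CustomChainHandler {
    sseStreamer?: { streamToken: (chatId: string, token: string) => void }
}

async function runSubChainWithoutSSE<T>(
    runManager: { handlers: any[] } | undefined,
    runSubChain: () => Promise<T>
): Promise<T> {
    const customHandlers = (runManager?.handlers ?? []).filter(
        (h): h is CustomChainHandler => h instanceof CustomChainHandler
    )
    const savedStreamer = customHandlers[0]?.sseStreamer

    for (const h of customHandlers) h.sseStreamer = undefined // mute sub-chain streaming
    try {
        return await runSubChain()
    } finally {
        for (const h of customHandlers) h.sseStreamer = savedStreamer // restore for the outer chain
    }
}
```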
filename = `artifact_${Date.now()}.jpg` - const res = await addSingleFileToStorage( + const { path } = await addSingleFileToStorage( 'image/jpg', jpegData, filename, + this.orgId, this.chatflowid, flowConfig!.chatId as string ) - artifacts.push({ type: 'jpeg', data: res }) + + artifacts.push({ type: 'jpeg', data: path }) } else if (key === 'html' || key === 'markdown' || key === 'latex' || key === 'json' || key === 'javascript') { artifacts.push({ type: key, data: (result as any)[key] }) } //TODO: support for pdf diff --git a/packages/components/nodes/tools/CustomTool/CustomTool.ts b/packages/components/nodes/tools/CustomTool/CustomTool.ts index f82b5d4f7c2..1ac5a326a9c 100644 --- a/packages/components/nodes/tools/CustomTool/CustomTool.ts +++ b/packages/components/nodes/tools/CustomTool/CustomTool.ts @@ -77,7 +77,8 @@ class CustomTool_Tools implements INode { return returnData } - const tools = await appDataSource.getRepository(databaseEntities['Tool']).find() + const searchOptions = options.searchOptions || {} + const tools = await appDataSource.getRepository(databaseEntities['Tool']).findBy(searchOptions) for (let i = 0; i < tools.length; i += 1) { const data = { @@ -122,7 +123,7 @@ class CustomTool_Tools implements INode { obj.schema = zodSchemaFunction(z) } - const variables = await getVars(appDataSource, databaseEntities, nodeData) + const variables = await getVars(appDataSource, databaseEntities, nodeData, options) const flow = { chatflowId: options.chatflowid } diff --git a/packages/components/nodes/tools/MCP/CustomMCP/CustomMCP.ts b/packages/components/nodes/tools/MCP/CustomMCP/CustomMCP.ts index b24144ea561..c782a357cf8 100644 --- a/packages/components/nodes/tools/MCP/CustomMCP/CustomMCP.ts +++ b/packages/components/nodes/tools/MCP/CustomMCP/CustomMCP.ts @@ -1,12 +1,34 @@ import { Tool } from '@langchain/core/tools' -import { INode, INodeData, INodeOptionsValue, INodeParams } from '../../../../src/Interface' +import { ICommonObject, IDatabaseEntity, INode, INodeData, INodeOptionsValue, INodeParams } from '../../../../src/Interface' import { MCPToolkit } from '../core' +import { getVars, prepareSandboxVars } from '../../../../src/utils' +import { DataSource } from 'typeorm' const mcpServerConfig = `{ "command": "npx", "args": ["-y", "@modelcontextprotocol/server-filesystem", "/path/to/allowed/files"] }` +const howToUseCode = ` +You can use variables in the MCP Server Config with double curly braces \`{{ }}\` and prefix \`$vars.\`. 
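Editor's note: the CodeInterpreterE2B hunk above reflects the updated storage helper contract: `addSingleFileToStorage` now takes `orgId` before `chatflowid` and resolves to an object whose `path` is what gets recorded in the artifact, and (as the OpenAPIToolkit and Vectara hunks below show) `getFileFromStorage` gains the same `orgId` argument. A call-shape sketch with stub helpers; the call shapes come from this diff, but the stub bodies and the path format are placeholders:

```typescript
// Stubs standing in for the real helpers in packages/components/src; only their
// call shapes (as used in this PR) matter here, the bodies are placeholders.
async function addSingleFileToStorage(
    mime: string, data: Buffer, fileName: string, orgId: string, chatflowid: string, chatId: string
): Promise<{ path: string }> {
    return { path: `FILE-STORAGE::${orgId}/${chatflowid}/${chatId}/${fileName}` } // placeholder path format
}

async function getFileFromStorage(fileName: string, orgId: string, chatflowid: string): Promise<Buffer> {
    return Buffer.from(`contents of ${orgId}/${chatflowid}/${fileName}`) // placeholder contents
}

async function saveArtifact(pngData: Buffer, orgId: string, chatflowid: string, chatId: string) {
    const filename = `artifact_${Date.now()}.png`
    // The helper now resolves to an object; the artifact records its `path`, not the raw return value.
    const { path } = await addSingleFileToStorage('image/png', pngData, filename, orgId, chatflowid, chatId)
    return { type: 'png', data: path }
}
```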
+ +For example, you have a variable called "var1": +\`\`\`json +{ + "command": "docker", + "args": [ + "run", + "-i", + "--rm", + "-e", "API_TOKEN" + ], + "env": { + "API_TOKEN": "{{$vars.var1}}" + } +} +\`\`\` +` + class Custom_MCP implements INode { label: string name: string @@ -23,7 +45,7 @@ class Custom_MCP implements INode { constructor() { this.label = 'Custom MCP' this.name = 'customMCP' - this.version = 1.0 + this.version = 1.1 this.type = 'Custom MCP Tool' this.icon = 'customMCP.png' this.category = 'Tools (MCP)' @@ -35,6 +57,10 @@ class Custom_MCP implements INode { name: 'mcpServerConfig', type: 'code', hideCodeExecute: true, + hint: { + label: 'How to use', + value: howToUseCode + }, placeholder: mcpServerConfig }, { @@ -50,9 +76,9 @@ class Custom_MCP implements INode { //@ts-ignore loadMethods = { - listActions: async (nodeData: INodeData): Promise => { + listActions: async (nodeData: INodeData, options: ICommonObject): Promise => { try { - const toolset = await this.getTools(nodeData) + const toolset = await this.getTools(nodeData, options) toolset.sort((a: any, b: any) => a.name.localeCompare(b.name)) return toolset.map(({ name, ...rest }) => ({ @@ -72,8 +98,8 @@ class Custom_MCP implements INode { } } - async init(nodeData: INodeData): Promise { - const tools = await this.getTools(nodeData) + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + const tools = await this.getTools(nodeData, options) const _mcpActions = nodeData.inputs?.mcpActions let mcpActions = [] @@ -88,19 +114,29 @@ class Custom_MCP implements INode { return tools.filter((tool: any) => mcpActions.includes(tool.name)) } - async getTools(nodeData: INodeData): Promise { + async getTools(nodeData: INodeData, options: ICommonObject): Promise { const mcpServerConfig = nodeData.inputs?.mcpServerConfig as string - if (!mcpServerConfig) { throw new Error('MCP Server Config is required') } + let sandbox: ICommonObject = {} + + if (mcpServerConfig.includes('$vars')) { + const appDataSource = options.appDataSource as DataSource + const databaseEntities = options.databaseEntities as IDatabaseEntity + + const variables = await getVars(appDataSource, databaseEntities, nodeData, options) + sandbox['$vars'] = prepareSandboxVars(variables) + } + try { let serverParams if (typeof mcpServerConfig === 'object') { - serverParams = mcpServerConfig + serverParams = substituteVariablesInObject(mcpServerConfig, sandbox) } else if (typeof mcpServerConfig === 'string') { - const serverParamsString = convertToValidJSONString(mcpServerConfig) + const substitutedString = substituteVariablesInString(mcpServerConfig, sandbox) + const serverParamsString = convertToValidJSONString(substitutedString) serverParams = JSON.parse(serverParamsString) } @@ -123,6 +159,67 @@ class Custom_MCP implements INode { } } +function substituteVariablesInObject(obj: any, sandbox: any): any { + if (typeof obj === 'string') { + // Replace variables in string values + return substituteVariablesInString(obj, sandbox) + } else if (Array.isArray(obj)) { + // Recursively process arrays + return obj.map((item) => substituteVariablesInObject(item, sandbox)) + } else if (obj !== null && typeof obj === 'object') { + // Recursively process object properties + const result: any = {} + for (const [key, value] of Object.entries(obj)) { + result[key] = substituteVariablesInObject(value, sandbox) + } + return result + } + // Return primitive values as-is + return obj +} + +function substituteVariablesInString(str: string, sandbox: any): string { + 
// Use regex to find {{$variableName.property}} patterns and replace with sandbox values + return str.replace(/\{\{\$([a-zA-Z_][a-zA-Z0-9_]*(?:\.[a-zA-Z_][a-zA-Z0-9_]*)*)\}\}/g, (match, variablePath) => { + try { + // Split the path into parts (e.g., "vars.testvar1" -> ["vars", "testvar1"]) + const pathParts = variablePath.split('.') + + // Start with the sandbox object + let current = sandbox + + // Navigate through the path + for (const part of pathParts) { + // For the first part, check if it exists with $ prefix + if (current === sandbox) { + const sandboxKey = `$${part}` + if (Object.keys(current).includes(sandboxKey)) { + current = current[sandboxKey] + } else { + // If the key doesn't exist, return the original match + return match + } + } else { + // For subsequent parts, access directly + if (current && typeof current === 'object' && part in current) { + current = current[part] + } else { + // If the property doesn't exist, return the original match + return match + } + } + } + + // Return the resolved value, converting to string if necessary + return typeof current === 'string' ? current : JSON.stringify(current) + } catch (error) { + // If any error occurs during resolution, return the original match + console.warn(`Error resolving variable ${match}:`, error) + return match + } + }) +} + function convertToValidJSONString(inputString: string) { try { const jsObject = Function('return ' + inputString)() diff --git a/packages/components/nodes/tools/MCP/Supergateway/SupergatewayMCP.ts b/packages/components/nodes/tools/MCP/Supergateway/SupergatewayMCP.ts new file mode 100644 index 00000000000..b8e1d628510 --- /dev/null +++ b/packages/components/nodes/tools/MCP/Supergateway/SupergatewayMCP.ts @@ -0,0 +1,119 @@ +import { Tool } from '@langchain/core/tools' +import { ICommonObject, INode, INodeData, INodeOptionsValue, INodeParams } from '../../../../src/Interface' +import { getNodeModulesPackagePath } from '../../../../src/utils' +import { MCPToolkit } from '../core' + +class Supergateway_MCP implements INode { + label: string + name: string + version: number + description: string + type: string + icon: string + category: string + baseClasses: string[] + documentation: string + credential: INodeParams + inputs: INodeParams[] + + constructor() { + this.label = 'Supergateway MCP' + this.name = 'supergatewayMCP' + this.version = 1.0 + this.type = 'Supergateway MCP Tool' + this.icon = 'supermachine-logo.png' + this.category = 'Tools (MCP)' + this.description = 'Runs MCP stdio-based servers over SSE (Server-Sent Events) or WebSockets (WS)' + this.documentation = 'https://github.com/supercorp-ai/supergateway' + this.inputs = [ + { + label: 'Arguments', + name: 'arguments', + type: 'string', + rows: 4, + placeholder: '--sse "https://mcp-server-ab71a6b2-cd55-49d0-adba-562bc85956e3.supermachine.app"', + description: + 'Arguments to pass to the supergateway server. Refer to the documentation for more information.' 
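Editor's note: with the two helpers above in place, a `{{$vars.x}}` placeholder anywhere in the MCP server config (string, nested object, or array element) is replaced from the sandbox built via `getVars`/`prepareSandboxVars`, and placeholders that cannot be resolved are left untouched. A small usage sketch of the functions defined in the hunk above; the sandbox value is invented for illustration:

```typescript
// Hypothetical usage of substituteVariablesInObject / substituteVariablesInString from above.
const sandbox = { $vars: { var1: 'secret-api-token' } }

const config = {
    command: 'docker',
    args: ['run', '-i', '--rm', '-e', 'API_TOKEN'],
    env: { API_TOKEN: '{{$vars.var1}}', OTHER: '{{$vars.missing}}' }
}

const resolved = substituteVariablesInObject(config, sandbox)
// resolved.env.API_TOKEN === 'secret-api-token'
// resolved.env.OTHER stays '{{$vars.missing}}' because the path cannot be resolved in the sandbox
```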
+ }, + { + label: 'Available Actions', + name: 'mcpActions', + type: 'asyncMultiOptions', + loadMethod: 'listActions', + refresh: true + } + ] + this.baseClasses = ['Tool'] + } + + //@ts-ignore + loadMethods = { + listActions: async (nodeData: INodeData, options: ICommonObject): Promise => { + try { + const toolset = await this.getTools(nodeData, options) + toolset.sort((a: any, b: any) => a.name.localeCompare(b.name)) + + return toolset.map(({ name, ...rest }) => ({ + label: name.toUpperCase(), + name: name, + description: rest.description || name + })) + } catch (error) { + return [ + { + label: 'No Available Actions', + name: 'error', + description: 'No available actions, please check the arguments again and refresh' + } + ] + } + } + } + + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + const tools = await this.getTools(nodeData, options) + + const _mcpActions = nodeData.inputs?.mcpActions + let mcpActions = [] + if (_mcpActions) { + try { + mcpActions = typeof _mcpActions === 'string' ? JSON.parse(_mcpActions) : _mcpActions + } catch (error) { + console.error('Error parsing mcp actions:', error) + } + } + + return tools.filter((tool: any) => mcpActions.includes(tool.name)) + } + + async getTools(nodeData: INodeData, _: ICommonObject): Promise { + const _args = nodeData.inputs?.arguments as string + const packagePath = getNodeModulesPackagePath('supergateway/dist/index.js') + + const serverParams = { + command: 'node', + args: [ + packagePath, + ..._args + .trim() + .split(/\s+/) + .map((arg) => { + // Remove surrounding double or single quotes if they exist + if ((arg.startsWith('"') && arg.endsWith('"')) || (arg.startsWith("'") && arg.endsWith("'"))) { + return arg.slice(1, -1) + } + return arg + }) + ] + } + + const toolkit = new MCPToolkit(serverParams, 'stdio') + await toolkit.initialize() + + const tools = toolkit.tools ?? 
[] + + return tools as Tool[] + } +} + +module.exports = { nodeClass: Supergateway_MCP } diff --git a/packages/components/nodes/tools/MCP/Supergateway/supermachine-logo.png b/packages/components/nodes/tools/MCP/Supergateway/supermachine-logo.png new file mode 100644 index 00000000000..4ff417e80e2 Binary files /dev/null and b/packages/components/nodes/tools/MCP/Supergateway/supermachine-logo.png differ diff --git a/packages/components/nodes/tools/OpenAPIToolkit/OpenAPIToolkit.ts b/packages/components/nodes/tools/OpenAPIToolkit/OpenAPIToolkit.ts index d44f5f103f9..23ef589aed0 100644 --- a/packages/components/nodes/tools/OpenAPIToolkit/OpenAPIToolkit.ts +++ b/packages/components/nodes/tools/OpenAPIToolkit/OpenAPIToolkit.ts @@ -85,8 +85,9 @@ class OpenAPIToolkit_Tools implements INode { let data if (yamlFileBase64.startsWith('FILE-STORAGE::')) { const file = yamlFileBase64.replace('FILE-STORAGE::', '') + const orgId = options.orgId const chatflowid = options.chatflowid - const fileData = await getFileFromStorage(file, chatflowid) + const fileData = await getFileFromStorage(file, orgId, chatflowid) const utf8String = fileData.toString('utf-8') data = load(utf8String) @@ -110,7 +111,7 @@ class OpenAPIToolkit_Tools implements INode { const appDataSource = options.appDataSource as DataSource const databaseEntities = options.databaseEntities as IDatabaseEntity - const variables = await getVars(appDataSource, databaseEntities, nodeData) + const variables = await getVars(appDataSource, databaseEntities, nodeData, options) const flow = { chatflowId: options.chatflowid } diff --git a/packages/components/nodes/utilities/CustomFunction/CustomFunction.ts b/packages/components/nodes/utilities/CustomFunction/CustomFunction.ts index 079f186ba22..cd09b271735 100644 --- a/packages/components/nodes/utilities/CustomFunction/CustomFunction.ts +++ b/packages/components/nodes/utilities/CustomFunction/CustomFunction.ts @@ -83,7 +83,7 @@ class CustomFunction_Utilities implements INode { const databaseEntities = options.databaseEntities as IDatabaseEntity const tools = Object.fromEntries((flatten(nodeData.inputs?.tools) as StructuredTool[])?.map((tool) => [tool.name, tool]) ?? 
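Editor's note: the new Supergateway MCP node above runs the locally installed `supergateway` package with `node <packagePath>`, building the argument array by splitting the user's argument string on whitespace and stripping surrounding quotes. An isolated sketch of that parsing step; note that because it splits on whitespace, a quoted argument containing spaces is not kept together, only its quotes are trimmed:

```typescript
// Isolated sketch of the argument parsing used by the Supergateway MCP node above.
function parseSupergatewayArgs(raw: string): string[] {
    return raw
        .trim()
        .split(/\s+/)
        .map((arg) => {
            // Strip surrounding double or single quotes if present
            if ((arg.startsWith('"') && arg.endsWith('"')) || (arg.startsWith("'") && arg.endsWith("'"))) {
                return arg.slice(1, -1)
            }
            return arg
        })
}

// parseSupergatewayArgs('--sse "https://mcp-server-ab71a6b2-cd55-49d0-adba-562bc85956e3.supermachine.app"')
//   -> ['--sse', 'https://mcp-server-ab71a6b2-cd55-49d0-adba-562bc85956e3.supermachine.app']
```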
[]) - const variables = await getVars(appDataSource, databaseEntities, nodeData) + const variables = await getVars(appDataSource, databaseEntities, nodeData, options) const flow = { chatflowId: options.chatflowid, sessionId: options.sessionId, diff --git a/packages/components/nodes/utilities/IfElseFunction/IfElseFunction.ts b/packages/components/nodes/utilities/IfElseFunction/IfElseFunction.ts index 08b81163652..3eb15ffeb6f 100644 --- a/packages/components/nodes/utilities/IfElseFunction/IfElseFunction.ts +++ b/packages/components/nodes/utilities/IfElseFunction/IfElseFunction.ts @@ -85,7 +85,7 @@ class IfElseFunction_Utilities implements INode { const appDataSource = options.appDataSource as DataSource const databaseEntities = options.databaseEntities as IDatabaseEntity - const variables = await getVars(appDataSource, databaseEntities, nodeData) + const variables = await getVars(appDataSource, databaseEntities, nodeData, options) const flow = { chatflowId: options.chatflowid, sessionId: options.sessionId, diff --git a/packages/components/nodes/vectorstores/Chroma/Chroma.ts b/packages/components/nodes/vectorstores/Chroma/Chroma.ts index 62f4b8a644b..90fb2c552e6 100644 --- a/packages/components/nodes/vectorstores/Chroma/Chroma.ts +++ b/packages/components/nodes/vectorstores/Chroma/Chroma.ts @@ -213,7 +213,6 @@ class Chroma_VectorStores implements INode { const chromaApiKey = getCredentialParam('chromaApiKey', credentialData, nodeData) const chromaTenant = getCredentialParam('chromaTenant', credentialData, nodeData) const chromaDatabase = getCredentialParam('chromaDatabase', credentialData, nodeData) - const chromaMetadataFilter = nodeData.inputs?.chromaMetadataFilter const obj: { diff --git a/packages/components/nodes/vectorstores/DocumentStoreVS/DocStoreVector.ts b/packages/components/nodes/vectorstores/DocumentStoreVS/DocStoreVector.ts index ba2660e0b2a..0f228d1fb79 100644 --- a/packages/components/nodes/vectorstores/DocumentStoreVS/DocStoreVector.ts +++ b/packages/components/nodes/vectorstores/DocumentStoreVS/DocStoreVector.ts @@ -56,7 +56,8 @@ class DocStore_VectorStores implements INode { return returnData } - const stores = await appDataSource.getRepository(databaseEntities['DocumentStore']).find() + const searchOptions = options.searchOptions || {} + const stores = await appDataSource.getRepository(databaseEntities['DocumentStore']).findBy(searchOptions) for (const store of stores) { if (store.status === 'UPSERTED') { const obj = { diff --git a/packages/components/nodes/vectorstores/Redis/Redis.ts b/packages/components/nodes/vectorstores/Redis/Redis.ts index d4fbcf49e49..23f1241392e 100644 --- a/packages/components/nodes/vectorstores/Redis/Redis.ts +++ b/packages/components/nodes/vectorstores/Redis/Redis.ts @@ -153,8 +153,12 @@ class Redis_VectorStores implements INode { keepAlive: process.env.REDIS_KEEP_ALIVE && !isNaN(parseInt(process.env.REDIS_KEEP_ALIVE, 10)) ? parseInt(process.env.REDIS_KEEP_ALIVE, 10) - : undefined // milliseconds - } + : undefined + }, + pingInterval: + process.env.REDIS_KEEP_ALIVE && !isNaN(parseInt(process.env.REDIS_KEEP_ALIVE, 10)) + ? parseInt(process.env.REDIS_KEEP_ALIVE, 10) + : undefined // Add Redis protocol-level pings }) await redisClient.connect() @@ -226,8 +230,12 @@ class Redis_VectorStores implements INode { keepAlive: process.env.REDIS_KEEP_ALIVE && !isNaN(parseInt(process.env.REDIS_KEEP_ALIVE, 10)) ? 
parseInt(process.env.REDIS_KEEP_ALIVE, 10) - : undefined // milliseconds - } + : undefined + }, + pingInterval: + process.env.REDIS_KEEP_ALIVE && !isNaN(parseInt(process.env.REDIS_KEEP_ALIVE, 10)) + ? parseInt(process.env.REDIS_KEEP_ALIVE, 10) + : undefined // Add Redis protocol-level pings }) const storeConfig: RedisVectorStoreConfig = { diff --git a/packages/components/nodes/vectorstores/Vectara/Vectara.ts b/packages/components/nodes/vectorstores/Vectara/Vectara.ts index d7260d109fb..9514bdc8f85 100644 --- a/packages/components/nodes/vectorstores/Vectara/Vectara.ts +++ b/packages/components/nodes/vectorstores/Vectara/Vectara.ts @@ -191,11 +191,12 @@ class Vectara_VectorStores implements INode { } else { files = [fileName] } + const orgId = options.orgId const chatflowid = options.chatflowid for (const file of files) { if (!file) continue - const fileData = await getFileFromStorage(file, chatflowid) + const fileData = await getFileFromStorage(file, orgId, chatflowid) const blob = new Blob([fileData]) vectaraFiles.push({ blob: blob, fileName: getFileName(file) }) } diff --git a/packages/components/nodes/vectorstores/Vectara/Vectara_Upload.ts b/packages/components/nodes/vectorstores/Vectara/Vectara_Upload.ts new file mode 100644 index 00000000000..e205c5e6d34 --- /dev/null +++ b/packages/components/nodes/vectorstores/Vectara/Vectara_Upload.ts @@ -0,0 +1,197 @@ +import { VectaraStore, VectaraLibArgs, VectaraFilter, VectaraContextConfig, VectaraFile } from '@langchain/community/vectorstores/vectara' +import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface' +import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' +import { getFileFromStorage } from '../../../src' + +class VectaraUpload_VectorStores implements INode { + label: string + name: string + version: number + description: string + type: string + icon: string + category: string + badge: string + baseClasses: string[] + inputs: INodeParams[] + credential: INodeParams + outputs: INodeOutputsValue[] + + constructor() { + this.label = 'Vectara Upload File' + this.name = 'vectaraUpload' + this.version = 1.0 + this.type = 'Vectara' + this.icon = 'vectara.png' + this.category = 'Vector Stores' + this.description = 'Upload files to Vectara' + this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.badge = 'DEPRECATING' + this.credential = { + label: 'Connect Credential', + name: 'credential', + type: 'credential', + credentialNames: ['vectaraApi'] + } + this.inputs = [ + { + label: 'File', + name: 'file', + description: + 'File to upload to Vectara. Supported file types: https://docs.vectara.com/docs/api-reference/indexing-apis/file-upload/file-upload-filetypes', + type: 'file' + }, + { + label: 'Metadata Filter', + name: 'filter', + description: + 'Filter to apply to Vectara metadata. Refer to the documentation on how to use Vectara filters with Flowise.', + type: 'string', + additionalParams: true, + optional: true + }, + { + label: 'Sentences Before', + name: 'sentencesBefore', + description: 'Number of sentences to fetch before the matched sentence. Defaults to 2.', + type: 'number', + additionalParams: true, + optional: true + }, + { + label: 'Sentences After', + name: 'sentencesAfter', + description: 'Number of sentences to fetch after the matched sentence. 
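Editor's note: the Redis vector store hunks above reuse `REDIS_KEEP_ALIVE` for a second purpose: besides the TCP-level `socket.keepAlive`, the same interval is now passed as `pingInterval` so node-redis also sends protocol-level PINGs, which helps keep idle connections from being dropped. A minimal sketch of the client construction, reduced to the relevant options (the URL source is an assumption; in the real node it comes from credentials):

```typescript
import { createClient } from 'redis'

// Minimal sketch: derive both keepAlive and pingInterval from REDIS_KEEP_ALIVE (milliseconds).
const keepAliveMs =
    process.env.REDIS_KEEP_ALIVE && !isNaN(parseInt(process.env.REDIS_KEEP_ALIVE, 10))
        ? parseInt(process.env.REDIS_KEEP_ALIVE, 10)
        : undefined

const redisClient = createClient({
    url: process.env.REDIS_URL, // assumption: the real node builds this from its credential data
    socket: { keepAlive: keepAliveMs }, // TCP keep-alive
    pingInterval: keepAliveMs // Redis protocol-level pings (node-redis v4+)
})
```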
Defaults to 2.', + type: 'number', + additionalParams: true, + optional: true + }, + { + label: 'Lambda', + name: 'lambda', + description: + 'Improves retrieval accuracy by adjusting the balance (from 0 to 1) between neural search and keyword-based search factors.', + type: 'number', + additionalParams: true, + optional: true + }, + { + label: 'Top K', + name: 'topK', + description: 'Number of top results to fetch. Defaults to 4', + placeholder: '4', + type: 'number', + additionalParams: true, + optional: true + } + ] + this.outputs = [ + { + label: 'Vectara Retriever', + name: 'retriever', + baseClasses: this.baseClasses + }, + { + label: 'Vectara Vector Store', + name: 'vectorStore', + baseClasses: [this.type, ...getBaseClasses(VectaraStore)] + } + ] + } + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + const credentialData = await getCredentialData(nodeData.credential ?? '', options) + const apiKey = getCredentialParam('apiKey', credentialData, nodeData) + const customerId = getCredentialParam('customerID', credentialData, nodeData) + const corpusId = getCredentialParam('corpusID', credentialData, nodeData).split(',') + + const fileBase64 = nodeData.inputs?.file + const vectaraMetadataFilter = nodeData.inputs?.filter as string + const sentencesBefore = nodeData.inputs?.sentencesBefore as number + const sentencesAfter = nodeData.inputs?.sentencesAfter as number + const lambda = nodeData.inputs?.lambda as number + const output = nodeData.outputs?.output as string + const topK = nodeData.inputs?.topK as string + const k = topK ? parseInt(topK, 10) : 4 + + const vectaraArgs: VectaraLibArgs = { + apiKey: apiKey, + customerId: customerId, + corpusId: corpusId, + source: 'flowise' + } + + const vectaraFilter: VectaraFilter = {} + if (vectaraMetadataFilter) vectaraFilter.filter = vectaraMetadataFilter + if (lambda) vectaraFilter.lambda = lambda + + const vectaraContextConfig: VectaraContextConfig = {} + if (sentencesBefore) vectaraContextConfig.sentencesBefore = sentencesBefore + if (sentencesAfter) vectaraContextConfig.sentencesAfter = sentencesAfter + vectaraFilter.contextConfig = vectaraContextConfig + + let files: string[] = [] + const vectaraFiles: VectaraFile[] = [] + + if (fileBase64.startsWith('FILE-STORAGE::')) { + const fileName = fileBase64.replace('FILE-STORAGE::', '') + if (fileName.startsWith('[') && fileName.endsWith(']')) { + files = JSON.parse(fileName) + } else { + files = [fileName] + } + const orgId = options.orgId + const chatflowid = options.chatflowid + + for (const file of files) { + const fileData = await getFileFromStorage(file, orgId, chatflowid) + const blob = new Blob([fileData]) + vectaraFiles.push({ blob: blob, fileName: getFileName(file) }) + } + } else { + if (fileBase64.startsWith('[') && fileBase64.endsWith(']')) { + files = JSON.parse(fileBase64) + } else { + files = [fileBase64] + } + + for (const file of files) { + const splitDataURI = file.split(',') + splitDataURI.pop() + const bf = Buffer.from(splitDataURI.pop() || '', 'base64') + const blob = new Blob([bf]) + vectaraFiles.push({ blob: blob, fileName: getFileName(file) }) + } + } + + const vectorStore = new VectaraStore(vectaraArgs) + await vectorStore.addFiles(vectaraFiles) + + if (output === 'retriever') { + const retriever = vectorStore.asRetriever(k, vectaraFilter) + return retriever + } else if (output === 'vectorStore') { + ;(vectorStore as any).k = k + return vectorStore + } + return vectorStore + } +} + +const getFileName = (fileBase64: string) => { + let fileNames 
= [] + if (fileBase64.startsWith('[') && fileBase64.endsWith(']')) { + const files = JSON.parse(fileBase64) + for (const file of files) { + const splitDataURI = file.split(',') + const filename = splitDataURI[splitDataURI.length - 1].split(':')[1] + fileNames.push(filename) + } + return fileNames.join(', ') + } else { + const splitDataURI = fileBase64.split(',') + const filename = splitDataURI[splitDataURI.length - 1].split(':')[1] + return filename + } +} + +module.exports = { nodeClass: VectaraUpload_VectorStores } diff --git a/packages/components/package.json b/packages/components/package.json index 44246c6509f..ba6c5747b3e 100644 --- a/packages/components/package.json +++ b/packages/components/package.json @@ -1,6 +1,6 @@ { "name": "flowise-components", - "version": "3.0.0", + "version": "3.0.1", "description": "Flowiseai Components", "main": "dist/src/index", "types": "dist/src/index.d.ts", @@ -57,7 +57,7 @@ "@langchain/weaviate": "^0.0.1", "@langchain/xai": "^0.0.1", "@mem0/community": "^0.0.1", - "@mendable/firecrawl-js": "^0.0.28", + "@mendable/firecrawl-js": "^1.18.2", "@mistralai/mistralai": "0.1.3", "@modelcontextprotocol/sdk": "^1.10.1", "@modelcontextprotocol/server-brave-search": "^0.6.2", @@ -133,6 +133,7 @@ "replicate": "^0.31.1", "sanitize-filename": "^1.6.3", "srt-parser-2": "^1.2.3", + "supergateway": "3.0.1", "typeorm": "^0.3.6", "weaviate-ts-client": "^1.1.0", "winston": "^3.9.0", diff --git a/packages/components/src/Interface.Evaluation.ts b/packages/components/src/Interface.Evaluation.ts new file mode 100644 index 00000000000..1199833d2f3 --- /dev/null +++ b/packages/components/src/Interface.Evaluation.ts @@ -0,0 +1,43 @@ +// Evaluation Related Interfaces +export interface IDataset { + id: string + name: string + createdDate: Date + updatedDate: Date +} +export interface IDatasetRow { + id: string + datasetId: string + input: string + output: string + updatedDate: Date +} + +export enum EvaluationStatus { + PENDING = 'pending', + COMPLETED = 'completed' +} +export interface IEvaluation { + id: string + name: string + chatflowId: string + chatflowName: string + datasetId: string + datasetName: string + evaluationType: string + average_metrics: string + status: string + runDate: Date +} + +export interface IEvaluationRun { + id: string + evaluationId: string + input: string + expectedOutput: string + actualOutput: string + metrics: string + runDate: Date + reasoning: string + score: number +} diff --git a/packages/components/src/Interface.ts b/packages/components/src/Interface.ts index 6fd3d884d6e..5e2ee383c07 100644 --- a/packages/components/src/Interface.ts +++ b/packages/components/src/Interface.ts @@ -414,11 +414,14 @@ export interface IVisionChatModal { revertToOriginalModel(): void setMultiModalOption(multiModalOption: IMultiModalOption): void } + export interface IStateWithMessages extends ICommonObject { messages: BaseMessage[] [key: string]: any } +export * from './Interface.Evaluation' + export interface IServerSideEventStreamer { streamStartEvent(chatId: string, data: any): void streamTokenEvent(chatId: string, data: string): void diff --git a/packages/components/src/MetricsLogger.ts b/packages/components/src/MetricsLogger.ts new file mode 100644 index 00000000000..662fc9e2766 --- /dev/null +++ b/packages/components/src/MetricsLogger.ts @@ -0,0 +1,135 @@ +import { BaseTracer, Run } from '@langchain/core/tracers/base' +import { Logger } from 'winston' +import { AgentRun, elapsed, tryJsonStringify } from './handler' + +export class MetricsLogger extends 
BaseTracer { + name = 'console_callback_handler' as const + logger: Logger + orgId?: string + + protected persistRun(_run: Run) { + return Promise.resolve() + } + + constructor(logger: Logger, orgId?: string) { + super() + this.logger = logger + this.orgId = orgId + } + + // utility methods + + getParents(run: Run) { + const parents: Run[] = [] + let currentRun = run + while (currentRun.parent_run_id) { + const parent = this.runMap.get(currentRun.parent_run_id) + if (parent) { + parents.push(parent) + currentRun = parent + } else { + break + } + } + return parents + } + + getBreadcrumbs(run: Run) { + const parents = this.getParents(run).reverse() + const string = [...parents, run] + .map((parent) => { + const name = `${parent.execution_order}:${parent.run_type}:${parent.name}` + return name + }) + .join(' > ') + return string + } + + // logging methods + + onChainStart(run: Run) { + const crumbs = this.getBreadcrumbs(run) + this.logger.verbose( + `[${this.orgId}]: [chain/start] [${crumbs}] Entering Chain run with input: ${tryJsonStringify(run.inputs, '[inputs]')}` + ) + } + + onChainEnd(run: Run) { + const crumbs = this.getBreadcrumbs(run) + this.logger.verbose( + `[${this.orgId}]: [chain/end] [${crumbs}] [${elapsed(run)}] Exiting Chain run with output: ${tryJsonStringify( + run.outputs, + '[outputs]' + )}` + ) + } + + onChainError(run: Run) { + const crumbs = this.getBreadcrumbs(run) + this.logger.verbose( + `[${this.orgId}]: [chain/error] [${crumbs}] [${elapsed(run)}] Chain run errored with error: ${tryJsonStringify( + run.error, + '[error]' + )}` + ) + } + + onLLMStart(run: Run) { + const crumbs = this.getBreadcrumbs(run) + const inputs = 'prompts' in run.inputs ? { prompts: (run.inputs.prompts as string[]).map((p) => p.trim()) } : run.inputs + this.logger.verbose(`[${this.orgId}]: [llm/start] [${crumbs}] Entering LLM run with input: ${tryJsonStringify(inputs, '[inputs]')}`) + } + + onLLMEnd(run: Run) { + const crumbs = this.getBreadcrumbs(run) + this.logger.verbose( + `[${this.orgId}]: [llm/end] [${crumbs}] [${elapsed(run)}] Exiting LLM run with output: ${tryJsonStringify( + run.outputs, + '[response]' + )}` + ) + } + + onLLMError(run: Run) { + const crumbs = this.getBreadcrumbs(run) + this.logger.verbose( + `[${this.orgId}]: [llm/error] [${crumbs}] [${elapsed(run)}] LLM run errored with error: ${tryJsonStringify( + run.error, + '[error]' + )}` + ) + } + + onToolStart(run: Run) { + const crumbs = this.getBreadcrumbs(run) + this.logger.verbose(`[${this.orgId}]: [tool/start] [${crumbs}] Entering Tool run with input: "${run.inputs.input?.trim()}"`) + } + + onToolEnd(run: Run) { + const crumbs = this.getBreadcrumbs(run) + this.logger.verbose( + `[${this.orgId}]: [tool/end] [${crumbs}] [${elapsed(run)}] Exiting Tool run with output: "${run.outputs?.output?.trim()}"` + ) + } + + onToolError(run: Run) { + const crumbs = this.getBreadcrumbs(run) + this.logger.verbose( + `[${this.orgId}]: [tool/error] [${crumbs}] [${elapsed(run)}] Tool run errored with error: ${tryJsonStringify( + run.error, + '[error]' + )}` + ) + } + + onAgentAction(run: Run) { + const agentRun = run as AgentRun + const crumbs = this.getBreadcrumbs(run) + this.logger.verbose( + `[${this.orgId}]: [agent/action] [${crumbs}] Agent selected action: ${tryJsonStringify( + agentRun.actions[agentRun.actions.length - 1], + '[action]' + )}` + ) + } +} diff --git a/packages/components/src/followUpPrompts.ts b/packages/components/src/followUpPrompts.ts index ecfcfe825a8..cc19864f68b 100644 --- a/packages/components/src/followUpPrompts.ts 
+++ b/packages/components/src/followUpPrompts.ts @@ -36,6 +36,7 @@ export const generateFollowUpPrompts = async ( model: providerConfig.modelName, temperature: parseFloat(`${providerConfig.temperature}`) }) + // @ts-ignore const structuredLLM = llm.withStructuredOutput(FollowUpPromptType) const structuredResponse = await structuredLLM.invoke(followUpPromptsPrompt) return structuredResponse diff --git a/packages/components/src/handler.ts b/packages/components/src/handler.ts index 0da42f47968..1a8830232ee 100644 --- a/packages/components/src/handler.ts +++ b/packages/components/src/handler.ts @@ -25,6 +25,8 @@ import { AgentAction } from '@langchain/core/agents' import { LunaryHandler } from '@langchain/community/callbacks/handlers/lunary' import { getCredentialData, getCredentialParam, getEnvironmentVariable } from './utils' +import { EvaluationRunTracer } from '../evaluation/EvaluationRunTracer' +import { EvaluationRunTracerLlama } from '../evaluation/EvaluationRunTracerLlama' import { ICommonObject, IDatabaseEntity, INodeData, IServerSideEventStreamer } from './Interface' import { LangWatch, LangWatchSpan, LangWatchTrace, autoconvertTypedValues } from 'langwatch' import { DataSource } from 'typeorm' @@ -32,7 +34,7 @@ import { ChatGenerationChunk } from '@langchain/core/outputs' import { AIMessageChunk, BaseMessageLike } from '@langchain/core/messages' import { Serialized } from '@langchain/core/load/serializable' -interface AgentRun extends Run { +export interface AgentRun extends Run { actions: AgentAction[] } @@ -173,7 +175,7 @@ function tryGetJsonSpaces() { } } -function tryJsonStringify(obj: unknown, fallback: string) { +export function tryJsonStringify(obj: unknown, fallback: string) { try { return JSON.stringify(obj, null, tryGetJsonSpaces()) } catch (err) { @@ -181,7 +183,7 @@ function tryJsonStringify(obj: unknown, fallback: string) { } } -function elapsed(run: Run): string { +export function elapsed(run: Run): string { if (!run.end_time) return '' const elapsed = run.end_time - run.start_time if (elapsed < 1000) { @@ -193,14 +195,16 @@ function elapsed(run: Run): string { export class ConsoleCallbackHandler extends BaseTracer { name = 'console_callback_handler' as const logger: Logger + orgId?: string protected persistRun(_run: Run) { return Promise.resolve() } - constructor(logger: Logger) { + constructor(logger: Logger, orgId?: string) { super() this.logger = logger + this.orgId = orgId if (getEnvironmentVariable('DEBUG') === 'true') { logger.level = getEnvironmentVariable('LOG_LEVEL') ?? 
'info' } @@ -235,57 +239,76 @@ export class ConsoleCallbackHandler extends BaseTracer { onChainStart(run: Run) { const crumbs = this.getBreadcrumbs(run) - this.logger.verbose(`[chain/start] [${crumbs}] Entering Chain run with input: ${tryJsonStringify(run.inputs, '[inputs]')}`) + this.logger.verbose( + `[${this.orgId}]: [chain/start] [${crumbs}] Entering Chain run with input: ${tryJsonStringify(run.inputs, '[inputs]')}` + ) } onChainEnd(run: Run) { const crumbs = this.getBreadcrumbs(run) this.logger.verbose( - `[chain/end] [${crumbs}] [${elapsed(run)}] Exiting Chain run with output: ${tryJsonStringify(run.outputs, '[outputs]')}` + `[${this.orgId}]: [chain/end] [${crumbs}] [${elapsed(run)}] Exiting Chain run with output: ${tryJsonStringify( + run.outputs, + '[outputs]' + )}` ) } onChainError(run: Run) { const crumbs = this.getBreadcrumbs(run) this.logger.verbose( - `[chain/error] [${crumbs}] [${elapsed(run)}] Chain run errored with error: ${tryJsonStringify(run.error, '[error]')}` + `[${this.orgId}]: [chain/error] [${crumbs}] [${elapsed(run)}] Chain run errored with error: ${tryJsonStringify( + run.error, + '[error]' + )}` ) } onLLMStart(run: Run) { const crumbs = this.getBreadcrumbs(run) const inputs = 'prompts' in run.inputs ? { prompts: (run.inputs.prompts as string[]).map((p) => p.trim()) } : run.inputs - this.logger.verbose(`[llm/start] [${crumbs}] Entering LLM run with input: ${tryJsonStringify(inputs, '[inputs]')}`) + this.logger.verbose(`[${this.orgId}]: [llm/start] [${crumbs}] Entering LLM run with input: ${tryJsonStringify(inputs, '[inputs]')}`) } onLLMEnd(run: Run) { const crumbs = this.getBreadcrumbs(run) this.logger.verbose( - `[llm/end] [${crumbs}] [${elapsed(run)}] Exiting LLM run with output: ${tryJsonStringify(run.outputs, '[response]')}` + `[${this.orgId}]: [llm/end] [${crumbs}] [${elapsed(run)}] Exiting LLM run with output: ${tryJsonStringify( + run.outputs, + '[response]' + )}` ) } onLLMError(run: Run) { const crumbs = this.getBreadcrumbs(run) this.logger.verbose( - `[llm/error] [${crumbs}] [${elapsed(run)}] LLM run errored with error: ${tryJsonStringify(run.error, '[error]')}` + `[${this.orgId}]: [llm/error] [${crumbs}] [${elapsed(run)}] LLM run errored with error: ${tryJsonStringify( + run.error, + '[error]' + )}` ) } onToolStart(run: Run) { const crumbs = this.getBreadcrumbs(run) - this.logger.verbose(`[tool/start] [${crumbs}] Entering Tool run with input: "${run.inputs.input?.trim()}"`) + this.logger.verbose(`[${this.orgId}]: [tool/start] [${crumbs}] Entering Tool run with input: "${run.inputs.input?.trim()}"`) } onToolEnd(run: Run) { const crumbs = this.getBreadcrumbs(run) - this.logger.verbose(`[tool/end] [${crumbs}] [${elapsed(run)}] Exiting Tool run with output: "${run.outputs?.output?.trim()}"`) + this.logger.verbose( + `[${this.orgId}]: [tool/end] [${crumbs}] [${elapsed(run)}] Exiting Tool run with output: "${run.outputs?.output?.trim()}"` + ) } onToolError(run: Run) { const crumbs = this.getBreadcrumbs(run) this.logger.verbose( - `[tool/error] [${crumbs}] [${elapsed(run)}] Tool run errored with error: ${tryJsonStringify(run.error, '[error]')}` + `[${this.orgId}]: [tool/error] [${crumbs}] [${elapsed(run)}] Tool run errored with error: ${tryJsonStringify( + run.error, + '[error]' + )}` ) } @@ -293,7 +316,7 @@ export class ConsoleCallbackHandler extends BaseTracer { const agentRun = run as AgentRun const crumbs = this.getBreadcrumbs(run) this.logger.verbose( - `[agent/action] [${crumbs}] Agent selected action: ${tryJsonStringify( + `[${this.orgId}]: 
[agent/action] [${crumbs}] Agent selected action: ${tryJsonStringify( agentRun.actions[agentRun.actions.length - 1], '[action]' )}` @@ -396,6 +419,7 @@ export class CustomChainHandler extends BaseCallbackHandler { } } +/*TODO - Add llamaIndex tracer to non evaluation runs*/ class ExtendedLunaryHandler extends LunaryHandler { chatId: string appDataSource: DataSource @@ -550,6 +574,13 @@ export const additionalCallbacks = async (nodeData: INodeData, options: ICommonO const handler = new ExtendedLunaryHandler(lunaryFields) callbacks.push(handler) + } else if (provider === 'evaluation') { + if (options.llamaIndex) { + new EvaluationRunTracerLlama(options.evaluationRunId) + } else { + const evaluationHandler = new EvaluationRunTracer(options.evaluationRunId) + callbacks.push(evaluationHandler) + } } else if (provider === 'langWatch') { const langWatchApiKey = getCredentialParam('langWatchApiKey', credentialData, nodeData) const langWatchEndpoint = getCredentialParam('langWatchEndpoint', credentialData, nodeData) diff --git a/packages/components/src/index.ts b/packages/components/src/index.ts index 2944bc320d0..f2dc564042d 100644 --- a/packages/components/src/index.ts +++ b/packages/components/src/index.ts @@ -9,6 +9,7 @@ export * from './utils' export * from './speechToText' export * from './storageUtils' export * from './handler' +export * from '../evaluation/EvaluationRunner' export * from './followUpPrompts' export * from './validator' export * from './agentflowv2Generator' diff --git a/packages/components/src/modelLoader.ts b/packages/components/src/modelLoader.ts index be588d57343..1628c91adfe 100644 --- a/packages/components/src/modelLoader.ts +++ b/packages/components/src/modelLoader.ts @@ -76,6 +76,66 @@ const getModelConfig = async (category: MODEL_TYPE, name: string) => { } } +export const getModelConfigByModelName = async (category: MODEL_TYPE, provider: string | undefined, name: string | undefined) => { + const modelFile = process.env.MODEL_LIST_CONFIG_JSON || MASTER_MODEL_LIST + + if (!modelFile) { + throw new Error('MODEL_LIST_CONFIG_JSON not set') + } + if (isValidUrl(modelFile)) { + try { + const resp = await axios.get(modelFile) + if (resp.status === 200 && resp.data) { + const models = resp.data + const categoryModels = models[category] + // each element of categoryModels is an object, with an array of models (models) and regions (regions) + // check if the name is in models + return getSpecificModelFromCategory(categoryModels, provider, name) + } else { + throw new Error('Error fetching model list') + } + } catch (e) { + const models = await fs.promises.readFile(getModelsJSONPath(), 'utf8') + if (models) { + const categoryModels = JSON.parse(models)[category] + return getSpecificModelFromCategory(categoryModels, provider, name) + } + return {} + } + } else { + try { + if (fs.existsSync(modelFile)) { + const models = await fs.promises.readFile(modelFile, 'utf8') + if (models) { + const categoryModels = JSON.parse(models)[category] + return getSpecificModelFromCategory(categoryModels, provider, name) + } + } + return {} + } catch (e) { + const models = await fs.promises.readFile(getModelsJSONPath(), 'utf8') + if (models) { + const categoryModels = JSON.parse(models)[category] + return getSpecificModelFromCategory(categoryModels, provider, name) + } + return {} + } + } +} + +const getSpecificModelFromCategory = (categoryModels: any, provider: string | undefined, name: string | undefined) => { + for (const cm of categoryModels) { + if (cm.models && cm.name.toLowerCase() === 
provider?.toLowerCase()) { + for (const m of cm.models) { + if (m.name === name) { + return m + } + } + } + } + return undefined +} + export const getModels = async (category: MODEL_TYPE, name: string) => { const returnData: INodeOptionsValue[] = [] try { diff --git a/packages/components/src/multiModalUtils.ts b/packages/components/src/multiModalUtils.ts index 412361aa6c2..b048e8a91a9 100644 --- a/packages/components/src/multiModalUtils.ts +++ b/packages/components/src/multiModalUtils.ts @@ -16,7 +16,7 @@ export const addImagesToMessages = async ( for (const upload of imageUploads) { let bf = upload.data if (upload.type == 'stored-file') { - const contents = await getFileFromStorage(upload.name, options.chatflowid, options.chatId) + const contents = await getFileFromStorage(upload.name, options.orgId, options.chatflowid, options.chatId) // as the image is stored in the server, read the file and convert it to base64 bf = 'data:' + upload.mime + ';base64,' + contents.toString('base64') diff --git a/packages/components/src/speechToText.ts b/packages/components/src/speechToText.ts index fbb659d54e3..29097655ab7 100644 --- a/packages/components/src/speechToText.ts +++ b/packages/components/src/speechToText.ts @@ -18,7 +18,7 @@ export const convertSpeechToText = async (upload: IFileUpload, speechToTextConfi if (speechToTextConfig) { const credentialId = speechToTextConfig.credentialId as string const credentialData = await getCredentialData(credentialId ?? '', options) - const audio_file = await getFileFromStorage(upload.name, options.chatflowid, options.chatId) + const audio_file = await getFileFromStorage(upload.name, options.orgId, options.chatflowid, options.chatId) switch (speechToTextConfig.name) { case SpeechToTextType.OPENAI_WHISPER: { diff --git a/packages/components/src/storageUtils.ts b/packages/components/src/storageUtils.ts index a918c4f002e..b1f74bf7703 100644 --- a/packages/components/src/storageUtils.ts +++ b/packages/components/src/storageUtils.ts @@ -5,6 +5,7 @@ import { GetObjectCommand, ListObjectsV2Command, PutObjectCommand, + ListObjectsCommand, S3Client, S3ClientConfig } from '@aws-sdk/client-s3' @@ -13,7 +14,32 @@ import { Readable } from 'node:stream' import { getUserHome } from './utils' import sanitize from 'sanitize-filename' -export const addBase64FilesToStorage = async (fileBase64: string, chatflowid: string, fileNames: string[]) => { +const dirSize = async (directoryPath: string) => { + let totalSize = 0 + + async function calculateSize(itemPath: string) { + const stats = await fs.promises.stat(itemPath) + + if (stats.isFile()) { + totalSize += stats.size + } else if (stats.isDirectory()) { + const files = await fs.promises.readdir(itemPath) + for (const file of files) { + await calculateSize(path.join(itemPath, file)) + } + } + } + + await calculateSize(directoryPath) + return totalSize +} + +export const addBase64FilesToStorage = async ( + fileBase64: string, + chatflowid: string, + fileNames: string[], + orgId: string +): Promise<{ path: string; totalSize: number }> => { const storageType = getStorageType() if (storageType === 's3') { const { s3Client, Bucket } = getS3Config() @@ -24,8 +50,8 @@ export const addBase64FilesToStorage = async (fileBase64: string, chatflowid: st const mime = splitDataURI[0].split(':')[1].split(';')[0] const sanitizedFilename = _sanitizeFilename(filename) + const Key = orgId + '/' + chatflowid + '/' + sanitizedFilename - const Key = chatflowid + '/' + sanitizedFilename const putObjCmd = new PutObjectCommand({ Bucket, Key, @@ -36,7 
+62,9 @@ export const addBase64FilesToStorage = async (fileBase64: string, chatflowid: st await s3Client.send(putObjCmd) fileNames.push(sanitizedFilename) - return 'FILE-STORAGE::' + JSON.stringify(fileNames) + const totalSize = await getS3StorageSize(orgId) + + return { path: 'FILE-STORAGE::' + JSON.stringify(fileNames), totalSize: totalSize / 1024 / 1024 } } else if (storageType === 'gcs') { const { bucket } = getGcsClient() const splitDataURI = fileBase64.split(',') @@ -55,9 +83,11 @@ export const addBase64FilesToStorage = async (fileBase64: string, chatflowid: st .end(bf) }) fileNames.push(sanitizedFilename) - return 'FILE-STORAGE::' + JSON.stringify(fileNames) + const totalSize = await getGCSStorageSize(orgId) + + return { path: 'FILE-STORAGE::' + JSON.stringify(fileNames), totalSize: totalSize / 1024 / 1024 } } else { - const dir = path.join(getStoragePath(), chatflowid) + const dir = path.join(getStoragePath(), orgId, chatflowid) if (!fs.existsSync(dir)) { fs.mkdirSync(dir, { recursive: true }) } @@ -68,13 +98,22 @@ export const addBase64FilesToStorage = async (fileBase64: string, chatflowid: st const sanitizedFilename = _sanitizeFilename(filename) const filePath = path.join(dir, sanitizedFilename) + fs.writeFileSync(filePath, bf) fileNames.push(sanitizedFilename) - return 'FILE-STORAGE::' + JSON.stringify(fileNames) + + const totalSize = await dirSize(path.join(getStoragePath(), orgId)) + return { path: 'FILE-STORAGE::' + JSON.stringify(fileNames), totalSize: totalSize / 1024 / 1024 } } } -export const addArrayFilesToStorage = async (mime: string, bf: Buffer, fileName: string, fileNames: string[], ...paths: string[]) => { +export const addArrayFilesToStorage = async ( + mime: string, + bf: Buffer, + fileName: string, + fileNames: string[], + ...paths: string[] +): Promise<{ path: string; totalSize: number }> => { const storageType = getStorageType() const sanitizedFilename = _sanitizeFilename(fileName) @@ -95,7 +134,10 @@ export const addArrayFilesToStorage = async (mime: string, bf: Buffer, fileName: }) await s3Client.send(putObjCmd) fileNames.push(sanitizedFilename) - return 'FILE-STORAGE::' + JSON.stringify(fileNames) + + const totalSize = await getS3StorageSize(paths[0]) + + return { path: 'FILE-STORAGE::' + JSON.stringify(fileNames), totalSize: totalSize / 1024 / 1024 } } else if (storageType === 'gcs') { const { bucket } = getGcsClient() const normalizedPaths = paths.map((p) => p.replace(/\\/g, '/')) @@ -109,7 +151,10 @@ export const addArrayFilesToStorage = async (mime: string, bf: Buffer, fileName: .end(bf) }) fileNames.push(sanitizedFilename) - return 'FILE-STORAGE::' + JSON.stringify(fileNames) + + const totalSize = await getGCSStorageSize(paths[0]) + + return { path: 'FILE-STORAGE::' + JSON.stringify(fileNames), totalSize: totalSize / 1024 / 1024 } } else { const dir = path.join(getStoragePath(), ...paths.map(_sanitizeFilename)) if (!fs.existsSync(dir)) { @@ -118,11 +163,19 @@ export const addArrayFilesToStorage = async (mime: string, bf: Buffer, fileName: const filePath = path.join(dir, sanitizedFilename) fs.writeFileSync(filePath, bf) fileNames.push(sanitizedFilename) - return 'FILE-STORAGE::' + JSON.stringify(fileNames) + + const totalSize = await dirSize(path.join(getStoragePath(), paths[0])) + + return { path: 'FILE-STORAGE::' + JSON.stringify(fileNames), totalSize: totalSize / 1024 / 1024 } } } -export const addSingleFileToStorage = async (mime: string, bf: Buffer, fileName: string, ...paths: string[]) => { +export const addSingleFileToStorage = async ( + mime: 
string, + bf: Buffer, + fileName: string, + ...paths: string[] +): Promise<{ path: string; totalSize: number }> => { const storageType = getStorageType() const sanitizedFilename = _sanitizeFilename(fileName) @@ -142,7 +195,10 @@ export const addSingleFileToStorage = async (mime: string, bf: Buffer, fileName: Body: bf }) await s3Client.send(putObjCmd) - return 'FILE-STORAGE::' + sanitizedFilename + + const totalSize = await getS3StorageSize(paths[0]) + + return { path: 'FILE-STORAGE::' + sanitizedFilename, totalSize: totalSize / 1024 / 1024 } } else if (storageType === 'gcs') { const { bucket } = getGcsClient() const normalizedPaths = paths.map((p) => p.replace(/\\/g, '/')) @@ -155,7 +211,10 @@ export const addSingleFileToStorage = async (mime: string, bf: Buffer, fileName: .on('finish', () => resolve()) .end(bf) }) - return 'FILE-STORAGE::' + sanitizedFilename + + const totalSize = await getGCSStorageSize(paths[0]) + + return { path: 'FILE-STORAGE::' + sanitizedFilename, totalSize: totalSize / 1024 / 1024 } } else { const dir = path.join(getStoragePath(), ...paths.map(_sanitizeFilename)) if (!fs.existsSync(dir)) { @@ -163,7 +222,9 @@ export const addSingleFileToStorage = async (mime: string, bf: Buffer, fileName: } const filePath = path.join(dir, sanitizedFilename) fs.writeFileSync(filePath, bf) - return 'FILE-STORAGE::' + sanitizedFilename + + const totalSize = await dirSize(path.join(getStoragePath(), paths[0])) + return { path: 'FILE-STORAGE::' + sanitizedFilename, totalSize: totalSize / 1024 / 1024 } } } @@ -215,36 +276,246 @@ export const getFileFromStorage = async (file: string, ...paths: string[]): Prom Key = Key.substring(1) } - const getParams = { - Bucket, - Key - } + try { + const getParams = { + Bucket, + Key + } - const response = await s3Client.send(new GetObjectCommand(getParams)) - const body = response.Body - if (body instanceof Readable) { - const streamToString = await body.transformToString('base64') - if (streamToString) { - return Buffer.from(streamToString, 'base64') + const response = await s3Client.send(new GetObjectCommand(getParams)) + const body = response.Body + if (body instanceof Readable) { + const streamToString = await body.transformToString('base64') + if (streamToString) { + return Buffer.from(streamToString, 'base64') + } + } + // @ts-ignore + const buffer = Buffer.concat(response.Body.toArray()) + return buffer + } catch (error) { + // Fallback: Check if file exists without the first path element (likely orgId) + if (paths.length > 1) { + const fallbackPaths = paths.slice(1) + let fallbackKey = fallbackPaths.reduce((acc, cur) => acc + '/' + cur, '') + '/' + sanitizedFilename + if (fallbackKey.startsWith('/')) { + fallbackKey = fallbackKey.substring(1) + } + + try { + const fallbackParams = { + Bucket, + Key: fallbackKey + } + const fallbackResponse = await s3Client.send(new GetObjectCommand(fallbackParams)) + const fallbackBody = fallbackResponse.Body + + // Get the file content + let fileContent: Buffer + if (fallbackBody instanceof Readable) { + const streamToString = await fallbackBody.transformToString('base64') + if (streamToString) { + fileContent = Buffer.from(streamToString, 'base64') + } else { + // @ts-ignore + fileContent = Buffer.concat(fallbackBody.toArray()) + } + } else { + // @ts-ignore + fileContent = Buffer.concat(fallbackBody.toArray()) + } + + // Move to correct location with orgId + const putObjCmd = new PutObjectCommand({ + Bucket, + Key, + Body: fileContent + }) + await s3Client.send(putObjCmd) + + // Delete the old file + await 
s3Client.send( + new DeleteObjectsCommand({ + Bucket, + Delete: { + Objects: [{ Key: fallbackKey }], + Quiet: false + } + }) + ) + + // Check if the directory is empty and delete recursively if needed + if (fallbackPaths.length > 0) { + await _cleanEmptyS3Folders(s3Client, Bucket, fallbackPaths[0]) + } + + return fileContent + } catch (fallbackError) { + // Throw the original error since the fallback also failed + throw error + } + } else { + throw error } } - // @ts-ignore - const buffer = Buffer.concat(response.Body.toArray()) - return buffer } else if (storageType === 'gcs') { const { bucket } = getGcsClient() const normalizedPaths = paths.map((p) => p.replace(/\\/g, '/')) const normalizedFilename = sanitizedFilename.replace(/\\/g, '/') const filePath = [...normalizedPaths, normalizedFilename].join('/') - const file = bucket.file(filePath) - const [buffer] = await file.download() - return buffer + + try { + const file = bucket.file(filePath) + const [buffer] = await file.download() + return buffer + } catch (error) { + // Fallback: Check if file exists without the first path element (likely orgId) + if (normalizedPaths.length > 1) { + const fallbackPaths = normalizedPaths.slice(1) + const fallbackPath = [...fallbackPaths, normalizedFilename].join('/') + + try { + const fallbackFile = bucket.file(fallbackPath) + const [buffer] = await fallbackFile.download() + + // Move to correct location with orgId + const file = bucket.file(filePath) + await new Promise((resolve, reject) => { + file.createWriteStream() + .on('error', (err) => reject(err)) + .on('finish', () => resolve()) + .end(buffer) + }) + + // Delete the old file + await fallbackFile.delete() + + // Check if the directory is empty and delete recursively if needed + if (fallbackPaths.length > 0) { + await _cleanEmptyGCSFolders(bucket, fallbackPaths[0]) + } + + return buffer + } catch (fallbackError) { + // Throw the original error since the fallback also failed + throw error + } + } else { + throw error + } + } + } else { + try { + const fileInStorage = path.join(getStoragePath(), ...paths.map(_sanitizeFilename), sanitizedFilename) + return fs.readFileSync(fileInStorage) + } catch (error) { + // Fallback: Check if file exists without the first path element (likely orgId) + if (paths.length > 1) { + const fallbackPaths = paths.slice(1) + const fallbackPath = path.join(getStoragePath(), ...fallbackPaths.map(_sanitizeFilename), sanitizedFilename) + + if (fs.existsSync(fallbackPath)) { + // Create directory if it doesn't exist + const targetPath = path.join(getStoragePath(), ...paths.map(_sanitizeFilename), sanitizedFilename) + const dir = path.dirname(targetPath) + if (!fs.existsSync(dir)) { + fs.mkdirSync(dir, { recursive: true }) + } + + // Copy file to correct location with orgId + fs.copyFileSync(fallbackPath, targetPath) + + // Delete the old file + fs.unlinkSync(fallbackPath) + + // Clean up empty directories recursively + if (fallbackPaths.length > 0) { + _cleanEmptyLocalFolders(path.join(getStoragePath(), ...fallbackPaths.map(_sanitizeFilename).slice(0, -1))) + } + + return fs.readFileSync(targetPath) + } else { + throw error + } + } else { + throw error + } + } + } +} + +export const getFilesListFromStorage = async (...paths: string[]): Promise> => { + const storageType = getStorageType() + if (storageType === 's3') { + const { s3Client, Bucket } = getS3Config() + + let Key = paths.reduce((acc, cur) => acc + '/' + cur, '') + if (Key.startsWith('/')) { + Key = Key.substring(1) + } + + const listCommand = new 
ListObjectsV2Command({ + Bucket, + Prefix: Key + }) + const list = await s3Client.send(listCommand) + + if (list.Contents && list.Contents.length > 0) { + return list.Contents.map((item) => ({ + name: item.Key?.split('/').pop() || '', + path: item.Key ?? '', + size: item.Size || 0 + })) + } else { + return [] + } } else { - const fileInStorage = path.join(getStoragePath(), ...paths.map(_sanitizeFilename), sanitizedFilename) - return fs.readFileSync(fileInStorage) + const directory = path.join(getStoragePath(), ...paths) + const filesList = getFilePaths(directory) + return filesList } } +interface FileInfo { + name: string + path: string + size: number +} + +function getFilePaths(dir: string): FileInfo[] { + let results: FileInfo[] = [] + + function readDirectory(directory: string) { + try { + if (!fs.existsSync(directory)) { + console.warn(`Directory does not exist: ${directory}`) + return + } + + const list = fs.readdirSync(directory) + list.forEach((file) => { + const filePath = path.join(directory, file) + try { + const stat = fs.statSync(filePath) + if (stat && stat.isDirectory()) { + readDirectory(filePath) + } else { + const sizeInMB = stat.size / (1024 * 1024) + results.push({ name: file, path: filePath, size: sizeInMB }) + } + } catch (error) { + console.error(`Error processing file ${filePath}:`, error) + } + }) + } catch (error) { + console.error(`Error reading directory ${directory}:`, error) + } + } + + readDirectory(dir) + return results +} + /** * Prepare storage path */ @@ -267,14 +538,26 @@ export const removeFilesFromStorage = async (...paths: string[]) => { if (Key.startsWith('/')) { Key = Key.substring(1) } + await _deleteS3Folder(Key) + + // check folder size after deleting all the files + const totalSize = await getS3StorageSize(paths[0]) + return { totalSize: totalSize / 1024 / 1024 } } else if (storageType === 'gcs') { const { bucket } = getGcsClient() const normalizedPath = paths.map((p) => p.replace(/\\/g, '/')).join('/') await bucket.deleteFiles({ prefix: `${normalizedPath}/` }) + + const totalSize = await getGCSStorageSize(paths[0]) + return { totalSize: totalSize / 1024 / 1024 } } else { const directory = path.join(getStoragePath(), ...paths.map(_sanitizeFilename)) - _deleteLocalFolderRecursive(directory) + await _deleteLocalFolderRecursive(directory) + + const totalSize = await dirSize(path.join(getStoragePath(), paths[0])) + + return { totalSize: totalSize / 1024 / 1024 } } } @@ -304,6 +587,10 @@ export const removeSpecificFileFromStorage = async (...paths: string[]) => { Key = Key.substring(1) } await _deleteS3Folder(Key) + + // check folder size after deleting all the files + const totalSize = await getS3StorageSize(paths[0]) + return { totalSize: totalSize / 1024 / 1024 } } else if (storageType === 'gcs') { const { bucket } = getGcsClient() const fileName = paths.pop() @@ -313,6 +600,9 @@ export const removeSpecificFileFromStorage = async (...paths: string[]) => { } const normalizedPath = paths.map((p) => p.replace(/\\/g, '/')).join('/') await bucket.file(normalizedPath).delete() + + const totalSize = await getGCSStorageSize(paths[0]) + return { totalSize: totalSize / 1024 / 1024 } } else { const fileName = paths.pop() if (fileName) { @@ -320,7 +610,15 @@ export const removeSpecificFileFromStorage = async (...paths: string[]) => { paths.push(sanitizedFilename) } const file = path.join(getStoragePath(), ...paths.map(_sanitizeFilename)) - fs.unlinkSync(file) + // check if file exists, if not skip delete + // this might happen when user tries to delete a 
document loader but the attached file is already deleted + const stat = fs.statSync(file, { throwIfNoEntry: false }) + if (stat && stat.isFile()) { + fs.unlinkSync(file) + } + + const totalSize = await dirSize(path.join(getStoragePath(), paths[0])) + return { totalSize: totalSize / 1024 / 1024 } } } @@ -333,52 +631,63 @@ export const removeFolderFromStorage = async (...paths: string[]) => { Key = Key.substring(1) } await _deleteS3Folder(Key) + + // check folder size after deleting all the files + const totalSize = await getS3StorageSize(paths[0]) + return { totalSize: totalSize / 1024 / 1024 } } else if (storageType === 'gcs') { const { bucket } = getGcsClient() const normalizedPath = paths.map((p) => p.replace(/\\/g, '/')).join('/') await bucket.deleteFiles({ prefix: `${normalizedPath}/` }) + + const totalSize = await getGCSStorageSize(paths[0]) + return { totalSize: totalSize / 1024 / 1024 } } else { const directory = path.join(getStoragePath(), ...paths.map(_sanitizeFilename)) - _deleteLocalFolderRecursive(directory, true) + await _deleteLocalFolderRecursive(directory, true) + + const totalSize = await dirSize(path.join(getStoragePath(), paths[0])) + return { totalSize: totalSize / 1024 / 1024 } } } -const _deleteLocalFolderRecursive = (directory: string, deleteParentChatflowFolder?: boolean) => { - // Console error here as failing is not destructive operation - if (fs.existsSync(directory)) { +const _deleteLocalFolderRecursive = async (directory: string, deleteParentChatflowFolder?: boolean) => { + try { + // Check if the path exists + await fs.promises.access(directory) + if (deleteParentChatflowFolder) { - fs.rmSync(directory, { recursive: true, force: true }) + await fs.promises.rmdir(directory, { recursive: true }) + } + + // Get stats of the path to determine if it's a file or directory + const stats = await fs.promises.stat(directory) + + if (stats.isDirectory()) { + // Read all directory contents + const files = await fs.promises.readdir(directory) + + // Recursively delete all contents + for (const file of files) { + const currentPath = path.join(directory, file) + await _deleteLocalFolderRecursive(currentPath) // Recursive call + } + + // Delete the directory itself after emptying it + await fs.promises.rmdir(directory, { recursive: true }) } else { - fs.readdir(directory, (error, files) => { - if (error) console.error('Could not read directory') - - for (let i = 0; i < files.length; i++) { - const file = files[i] - const file_path = path.join(directory, file) - - fs.stat(file_path, (error, stat) => { - if (error) console.error('File do not exist') - - if (!stat.isDirectory()) { - fs.unlink(file_path, (error) => { - if (error) console.error('Could not delete file') - }) - if (i === files.length - 1) { - fs.rmSync(directory, { recursive: true, force: true }) - } - } else { - _deleteLocalFolderRecursive(file_path) - } - }) - } - }) + // If it's a file, delete it directly + await fs.promises.unlink(directory) } + } catch (error) { + // Error handling } } const _deleteS3Folder = async (location: string) => { let count = 0 // number of files deleted const { s3Client, Bucket } = getS3Config() + async function recursiveS3Delete(token?: any) { // get the files const listCommand = new ListObjectsV2Command({ @@ -410,6 +719,7 @@ const _deleteS3Folder = async (location: string) => { // return total deleted count when finished return `${count} files deleted from S3` } + // start the recursive function return recursiveS3Delete() } @@ -417,34 +727,120 @@ const _deleteS3Folder = async 
(location: string) => { export const streamStorageFile = async ( chatflowId: string, chatId: string, - fileName: string + fileName: string, + orgId: string ): Promise => { const storageType = getStorageType() const sanitizedFilename = sanitize(fileName) if (storageType === 's3') { const { s3Client, Bucket } = getS3Config() - const Key = chatflowId + '/' + chatId + '/' + sanitizedFilename + const Key = orgId + '/' + chatflowId + '/' + chatId + '/' + sanitizedFilename const getParams = { Bucket, Key } - const response = await s3Client.send(new GetObjectCommand(getParams)) - const body = response.Body - if (body instanceof Readable) { - const blob = await body.transformToByteArray() - return Buffer.from(blob) + try { + const response = await s3Client.send(new GetObjectCommand(getParams)) + const body = response.Body + if (body instanceof Readable) { + const blob = await body.transformToByteArray() + return Buffer.from(blob) + } + } catch (error) { + // Fallback: Check if file exists without orgId + const fallbackKey = chatflowId + '/' + chatId + '/' + sanitizedFilename + try { + const fallbackParams = { + Bucket, + Key: fallbackKey + } + const fallbackResponse = await s3Client.send(new GetObjectCommand(fallbackParams)) + const fallbackBody = fallbackResponse.Body + + // If found, copy to correct location with orgId + if (fallbackBody) { + // Get the file content + let fileContent: Buffer + if (fallbackBody instanceof Readable) { + const blob = await fallbackBody.transformToByteArray() + fileContent = Buffer.from(blob) + } else { + // @ts-ignore + fileContent = Buffer.concat(fallbackBody.toArray()) + } + + // Move to correct location with orgId + const putObjCmd = new PutObjectCommand({ + Bucket, + Key, + Body: fileContent + }) + await s3Client.send(putObjCmd) + + // Delete the old file + await s3Client.send( + new DeleteObjectsCommand({ + Bucket, + Delete: { + Objects: [{ Key: fallbackKey }], + Quiet: false + } + }) + ) + + // Check if the directory is empty and delete recursively if needed + await _cleanEmptyS3Folders(s3Client, Bucket, chatflowId) + + return fileContent + } + } catch (fallbackError) { + // File not found in fallback location either + throw new Error(`File ${fileName} not found`) + } } } else if (storageType === 'gcs') { const { bucket } = getGcsClient() const normalizedChatflowId = chatflowId.replace(/\\/g, '/') const normalizedChatId = chatId.replace(/\\/g, '/') const normalizedFilename = sanitizedFilename.replace(/\\/g, '/') - const filePath = `${normalizedChatflowId}/${normalizedChatId}/${normalizedFilename}` - const [buffer] = await bucket.file(filePath).download() - return buffer + const filePath = `${orgId}/${normalizedChatflowId}/${normalizedChatId}/${normalizedFilename}` + + try { + const [buffer] = await bucket.file(filePath).download() + return buffer + } catch (error) { + // Fallback: Check if file exists without orgId + const fallbackPath = `${normalizedChatflowId}/${normalizedChatId}/${normalizedFilename}` + try { + const fallbackFile = bucket.file(fallbackPath) + const [buffer] = await fallbackFile.download() + + // If found, copy to correct location with orgId + if (buffer) { + const file = bucket.file(filePath) + await new Promise((resolve, reject) => { + file.createWriteStream() + .on('error', (err) => reject(err)) + .on('finish', () => resolve()) + .end(buffer) + }) + + // Delete the old file + await fallbackFile.delete() + + // Check if the directory is empty and delete recursively if needed + await _cleanEmptyGCSFolders(bucket, normalizedChatflowId) + + 
return buffer + } + } catch (fallbackError) { + // File not found in fallback location either + throw new Error(`File ${fileName} not found`) + } + } } else { - const filePath = path.join(getStoragePath(), chatflowId, chatId, sanitizedFilename) + const filePath = path.join(getStoragePath(), orgId, chatflowId, chatId, sanitizedFilename) //raise error if file path is not absolute if (!path.isAbsolute(filePath)) throw new Error(`Invalid file path`) //raise error if file path contains '..' @@ -455,11 +851,159 @@ export const streamStorageFile = async ( if (fs.existsSync(filePath)) { return fs.createReadStream(filePath) } else { - throw new Error(`File ${fileName} not found`) + // Fallback: Check if file exists without orgId + const fallbackPath = path.join(getStoragePath(), chatflowId, chatId, sanitizedFilename) + + if (fs.existsSync(fallbackPath)) { + // Create directory if it doesn't exist + const dir = path.dirname(filePath) + if (!fs.existsSync(dir)) { + fs.mkdirSync(dir, { recursive: true }) + } + + // Copy file to correct location with orgId + fs.copyFileSync(fallbackPath, filePath) + + // Delete the old file + fs.unlinkSync(fallbackPath) + + // Clean up empty directories recursively + _cleanEmptyLocalFolders(path.join(getStoragePath(), chatflowId, chatId)) + + return fs.createReadStream(filePath) + } else { + throw new Error(`File ${fileName} not found`) + } + } + } +} + +/** + * Check if a local directory is empty and delete it if so, + * then check parent directories recursively + */ +const _cleanEmptyLocalFolders = (dirPath: string) => { + try { + // Stop if we reach the storage root + if (dirPath === getStoragePath()) return + + // Check if directory exists + if (!fs.existsSync(dirPath)) return + + // Read directory contents + const files = fs.readdirSync(dirPath) + + // If directory is empty, delete it and check parent + if (files.length === 0) { + fs.rmdirSync(dirPath) + // Recursively check parent directory + _cleanEmptyLocalFolders(path.dirname(dirPath)) } + } catch (error) { + // Ignore errors during cleanup + console.error('Error cleaning empty folders:', error) } } +/** + * Check if an S3 "folder" is empty and delete it recursively + */ +const _cleanEmptyS3Folders = async (s3Client: S3Client, Bucket: string, prefix: string) => { + try { + // Skip if prefix is empty + if (!prefix) return + + // List objects in this "folder" + const listCmd = new ListObjectsV2Command({ + Bucket, + Prefix: prefix + '/', + Delimiter: '/' + }) + + const response = await s3Client.send(listCmd) + + // If folder is empty (only contains common prefixes but no files) + if ( + (response.Contents?.length === 0 || !response.Contents) && + (response.CommonPrefixes?.length === 0 || !response.CommonPrefixes) + ) { + // Delete the folder marker if it exists + await s3Client.send( + new DeleteObjectsCommand({ + Bucket, + Delete: { + Objects: [{ Key: prefix + '/' }], + Quiet: true + } + }) + ) + + // Recursively check parent folder + const parentPrefix = prefix.substring(0, prefix.lastIndexOf('/')) + if (parentPrefix) { + await _cleanEmptyS3Folders(s3Client, Bucket, parentPrefix) + } + } + } catch (error) { + // Ignore errors during cleanup + console.error('Error cleaning empty S3 folders:', error) + } +} + +/** + * Check if a GCS "folder" is empty and delete recursively if so + */ +const _cleanEmptyGCSFolders = async (bucket: any, prefix: string) => { + try { + // Skip if prefix is empty + if (!prefix) return + + // List files with this prefix + const [files] = await bucket.getFiles({ + prefix: prefix + '/', + 
delimiter: '/' + }) + + // If folder is empty (no files) + if (files.length === 0) { + // Delete the folder marker if it exists + try { + await bucket.file(prefix + '/').delete() + } catch (err) { + // Folder marker might not exist, ignore + } + + // Recursively check parent folder + const parentPrefix = prefix.substring(0, prefix.lastIndexOf('/')) + if (parentPrefix) { + await _cleanEmptyGCSFolders(bucket, parentPrefix) + } + } + } catch (error) { + // Ignore errors during cleanup + console.error('Error cleaning empty GCS folders:', error) + } +} + +export const getGCSStorageSize = async (orgId: string): Promise => { + const { bucket } = getGcsClient() + let totalSize = 0 + + const [files] = await bucket.getFiles({ prefix: orgId }) + + for (const file of files) { + const size = file.metadata.size + // Handle different types that size could be + if (typeof size === 'string') { + totalSize += parseInt(size, 10) || 0 + } else if (typeof size === 'number') { + totalSize += size + } + } + + return totalSize +} + export const getGcsClient = () => { const pathToGcsCredential = process.env.GOOGLE_CLOUD_STORAGE_CREDENTIAL const projectId = process.env.GOOGLE_CLOUD_STORAGE_PROJ_ID @@ -482,6 +1026,20 @@ export const getGcsClient = () => { return { storage, bucket } } +export const getS3StorageSize = async (orgId: string): Promise => { + const { s3Client, Bucket } = getS3Config() + const getCmd = new ListObjectsCommand({ + Bucket, + Prefix: orgId + }) + const headObj = await s3Client.send(getCmd) + let totalSize = 0 + for (const obj of headObj.Contents || []) { + totalSize += obj.Size || 0 + } + return totalSize +} + export const getS3Config = () => { const accessKeyId = process.env.S3_STORAGE_ACCESS_KEY_ID const secretAccessKey = process.env.S3_STORAGE_SECRET_ACCESS_KEY diff --git a/packages/components/src/utils.ts b/packages/components/src/utils.ts index 957fd4992c7..1e700839d5b 100644 --- a/packages/components/src/utils.ts +++ b/packages/components/src/utils.ts @@ -4,7 +4,7 @@ import * as fs from 'fs' import * as path from 'path' import { JSDOM } from 'jsdom' import { z } from 'zod' -import { DataSource } from 'typeorm' +import { DataSource, Equal } from 'typeorm' import { ICommonObject, IDatabaseEntity, IFileUpload, IMessage, INodeData, IVariable, MessageContentImageUrl } from './Interface' import { AES, enc } from 'crypto-js' import { omit } from 'lodash' @@ -706,7 +706,7 @@ export const getUserHome = (): string => { * @param {IChatMessage[]} chatmessages * @returns {BaseMessage[]} */ -export const mapChatMessageToBaseMessage = async (chatmessages: any[] = []): Promise => { +export const mapChatMessageToBaseMessage = async (chatmessages: any[] = [], orgId: string): Promise => { const chatHistory = [] for (const message of chatmessages) { @@ -722,7 +722,7 @@ export const mapChatMessageToBaseMessage = async (chatmessages: any[] = []): Pro const imageContents: MessageContentImageUrl[] = [] for (const upload of uploads) { if (upload.type === 'stored-file' && upload.mime.startsWith('image/')) { - const fileData = await getFileFromStorage(upload.name, message.chatflowid, message.chatId) + const fileData = await getFileFromStorage(upload.name, orgId, message.chatflowid, message.chatId) // as the image is stored in the server, read the file and convert it to base64 const bf = 'data:' + upload.mime + ';base64,' + fileData.toString('base64') @@ -746,7 +746,8 @@ export const mapChatMessageToBaseMessage = async (chatmessages: any[] = []): Pro const options = { retrieveAttachmentChatId: true, chatflowid: 
message.chatflowid, - chatId: message.chatId + chatId: message.chatId, + orgId } let fileInputFieldFromMimeType = 'txtFile' fileInputFieldFromMimeType = mapMimeTypeToInputField(upload.mime) @@ -935,8 +936,16 @@ export const convertMultiOptionsToStringArray = (inputString: string): string[] * @param {IDatabaseEntity} databaseEntities * @param {INodeData} nodeData */ -export const getVars = async (appDataSource: DataSource, databaseEntities: IDatabaseEntity, nodeData: INodeData) => { - const variables = ((await appDataSource.getRepository(databaseEntities['Variable']).find()) as IVariable[]) ?? [] +export const getVars = async ( + appDataSource: DataSource, + databaseEntities: IDatabaseEntity, + nodeData: INodeData, + options: ICommonObject +) => { + const variables = + ((await appDataSource + .getRepository(databaseEntities['Variable']) + .findBy(options.workspaceId ? { workspaceId: Equal(options.workspaceId) } : {})) as IVariable[]) ?? [] // override variables defined in overrideConfig // nodeData.inputs.vars is an Object, check each property and override the variable diff --git a/packages/server/.env.example b/packages/server/.env.example index 54db5926855..ea5cb01f90f 100644 --- a/packages/server/.env.example +++ b/packages/server/.env.example @@ -1,19 +1,10 @@ PORT=3000 -# APIKEY_STORAGE_TYPE=json (json | db) -# APIKEY_PATH=/your_api_key_path/.flowise +# APIKEY_PATH=/your_apikey_path/.flowise # (will be deprecated by end of 2025) -# SECRETKEY_STORAGE_TYPE=local #(local | aws) -# SECRETKEY_PATH=/your_api_key_path/.flowise -# FLOWISE_SECRETKEY_OVERWRITE=myencryptionkey -# SECRETKEY_AWS_ACCESS_KEY= -# SECRETKEY_AWS_SECRET_KEY= -# SECRETKEY_AWS_REGION=us-west-2 -# SECRETKEY_AWS_NAME=FlowiseEncryptionKey - -# NUMBER_OF_PROXIES= 1 -# CORS_ORIGINS=* -# IFRAME_ORIGINS=* +############################################################################################################ +############################################## DATABASE #################################################### +############################################################################################################ # DATABASE_PATH=/your_database_path/.flowise # DATABASE_TYPE=postgres @@ -25,26 +16,36 @@ PORT=3000 # DATABASE_SSL=true # DATABASE_SSL_KEY_BASE64= -# FLOWISE_USERNAME=user -# FLOWISE_PASSWORD=1234 -# FLOWISE_FILE_SIZE_LIMIT=50mb + +############################################################################################################ +############################################## SECRET KEYS ################################################# +############################################################################################################ + +# SECRETKEY_STORAGE_TYPE=local #(local | aws) +# SECRETKEY_PATH=/your_secret_path/.flowise +# FLOWISE_SECRETKEY_OVERWRITE=myencryptionkey # (if you want to overwrite the secret key) +# SECRETKEY_AWS_ACCESS_KEY= +# SECRETKEY_AWS_SECRET_KEY= +# SECRETKEY_AWS_REGION=us-west-2 +# SECRETKEY_AWS_NAME=FlowiseEncryptionKey + + +############################################################################################################ +############################################## LOGGING ##################################################### +############################################################################################################ # DEBUG=true # LOG_PATH=/your_log_path/.flowise/logs -# LOG_LEVEL=info (error | warn | info | verbose | debug) +# LOG_LEVEL=info #(error | warn | info | verbose | debug) # TOOL_FUNCTION_BUILTIN_DEP=crypto,fs # 
TOOL_FUNCTION_EXTERNAL_DEP=moment,lodash -# LANGCHAIN_TRACING_V2=true -# LANGCHAIN_ENDPOINT=https://api.smith.langchain.com -# LANGCHAIN_API_KEY=your_api_key -# LANGCHAIN_PROJECT=your_project -# Uncomment the following line to enable model list config, load the list of models from your local config file -# see https://raw.githubusercontent.com/FlowiseAI/Flowise/main/packages/components/models.json for the format -# MODEL_LIST_CONFIG_JSON=/your_model_list_config_file_path +############################################################################################################ +############################################## STORAGE ##################################################### +############################################################################################################ -# STORAGE_TYPE=local (local | s3) +# STORAGE_TYPE=local (local | s3 | gcs) # BLOB_STORAGE_PATH=/your_storage_path/.flowise/storage # S3_STORAGE_BUCKET_NAME=flowise # S3_STORAGE_ACCESS_KEY_ID= @@ -57,12 +58,69 @@ PORT=3000 # GOOGLE_CLOUD_STORAGE_BUCKET_NAME= # GOOGLE_CLOUD_UNIFORM_BUCKET_ACCESS=true + +############################################################################################################ +############################################## SETTINGS #################################################### +############################################################################################################ + +# NUMBER_OF_PROXIES= 1 +# CORS_ORIGINS=* +# IFRAME_ORIGINS=* +# FLOWISE_FILE_SIZE_LIMIT=50mb # SHOW_COMMUNITY_NODES=true +# DISABLE_FLOWISE_TELEMETRY=true # DISABLED_NODES=bufferMemory,chatOpenAI (comma separated list of node names to disable) +# Uncomment the following line to enable model list config, load the list of models from your local config file +# see https://raw.githubusercontent.com/FlowiseAI/Flowise/main/packages/components/models.json for the format +# MODEL_LIST_CONFIG_JSON=/your_model_list_config_file_path + + +############################################################################################################ +############################################ AUTH PARAMETERS ############################################### +############################################################################################################ + +# APP_URL=http://localhost:3000 + +# SMTP_HOST=smtp.host.com +# SMTP_PORT=465 +# SMTP_USER=smtp_user +# SMTP_PASSWORD=smtp_password +# SMTP_SECURE=true +# ALLOW_UNAUTHORIZED_CERTS=false +# SENDER_EMAIL=team@example.com + +# JWT_AUTH_TOKEN_SECRET='AABBCCDDAABBCCDDAABBCCDDAABBCCDDAABBCCDD' +# JWT_REFRESH_TOKEN_SECRET='AABBCCDDAABBCCDDAABBCCDDAABBCCDDAABBCCDD' +# JWT_ISSUER='ISSUER' +# JWT_AUDIENCE='AUDIENCE' +# JWT_TOKEN_EXPIRY_IN_MINUTES=360 +# JWT_REFRESH_TOKEN_EXPIRY_IN_MINUTES=43200 +# EXPIRE_AUTH_TOKENS_ON_RESTART=true # (if you need to expire all tokens on app restart) +# EXPRESS_SESSION_SECRET=flowise + +# INVITE_TOKEN_EXPIRY_IN_HOURS=24 +# PASSWORD_RESET_TOKEN_EXPIRY_IN_MINS=15 +# PASSWORD_SALT_HASH_ROUNDS=10 +# TOKEN_HASH_SECRET='popcorn' + +# WORKSPACE_INVITE_TEMPLATE_PATH=/path/to/custom/workspace_invite.hbs + + +############################################################################################################ +############################################# ENTERPRISE ################################################### +############################################################################################################ + +# LICENSE_URL= +# FLOWISE_EE_LICENSE_KEY= +# OFFLINE= + + 
+############################################################################################################ +########################################### METRICS COLLECTION ############################################# +############################################################################################################ + +# POSTHOG_PUBLIC_API_KEY=your_posthog_public_api_key -###################### -# METRICS COLLECTION -####################### # ENABLE_METRICS=false # METRICS_PROVIDER=prometheus # prometheus | open_telemetry # METRICS_INCLUDE_NODE_METRICS=true # default is true @@ -73,15 +131,21 @@ PORT=3000 # METRICS_OPEN_TELEMETRY_PROTOCOL=http # http | grpc | proto (default is http) # METRICS_OPEN_TELEMETRY_DEBUG=true # default is false -# Uncomment the following lines to enable global agent proxy -# see https://www.npmjs.com/package/global-agent for more details + +############################################################################################################ +############################################### PROXY ###################################################### +############################################################################################################ + +# Uncomment the following lines to enable global agent proxy, see https://www.npmjs.com/package/global-agent for more details # GLOBAL_AGENT_HTTP_PROXY=CorporateHttpProxyUrl # GLOBAL_AGENT_HTTPS_PROXY=CorporateHttpsProxyUrl # GLOBAL_AGENT_NO_PROXY=ExceptionHostsToBypassProxyIfNeeded -###################### -# QUEUE CONFIGURATION -####################### + +############################################################################################################ +########################################### QUEUE CONFIGURATION ############################################ +############################################################################################################ + # MODE=queue #(queue | main) # QUEUE_NAME=flowise-queue # QUEUE_REDIS_EVENT_STREAM_MAX_LEN=100000 diff --git a/packages/server/README-ZH.md b/packages/server/README-ZH.md index f0d7992e044..01f2248c2be 100644 --- a/packages/server/README-ZH.md +++ b/packages/server/README-ZH.md @@ -22,15 +22,6 @@ 3. 打开[http://localhost:3000](http://localhost:3000) -## 🔒 身份验证 - -要启用应用级身份验证,请将`FLOWISE_USERNAME`和`FLOWISE_PASSWORD`添加到`.env`文件中: - -``` -FLOWISE_USERNAME=user -FLOWISE_PASSWORD=1234 -``` - ## 🌱 环境变量 Flowise 支持不同的环境变量来配置您的实例。您可以在`packages/server`文件夹中的`.env`文件中指定以下变量。阅读[更多](https://docs.flowiseai.com/environment-variables) diff --git a/packages/server/README.md b/packages/server/README.md index cc52dc52290..af29ebea246 100644 --- a/packages/server/README.md +++ b/packages/server/README.md @@ -22,15 +22,6 @@ English | [中文](./README-ZH.md) 3. Open [http://localhost:3000](http://localhost:3000) -## 🔒 Authentication - -To enable app level authentication, add `FLOWISE_USERNAME` and `FLOWISE_PASSWORD` to the `.env` file: - -``` -FLOWISE_USERNAME=user -FLOWISE_PASSWORD=1234 -``` - ## 🌱 Env Variables Flowise support different environment variables to configure your instance. You can specify the following variables in the `.env` file inside `packages/server` folder. 
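These variables also cover the new queue mode. As a minimal, hypothetical sketch (the real queue wiring lives in the server's queue classes and may differ), the QUEUE CONFIGURATION values from `.env.example` map naturally onto BullMQ, which this PR pins to 5.45.2; the Redis host/port variable names below are placeholders, since they are not part of the block shown above:

```typescript
// Hypothetical mapping of the QUEUE_* variables onto BullMQ (pinned to 5.45.2 in this PR).
// REDIS_HOST / REDIS_PORT are placeholder names, not confirmed Flowise settings.
import { Queue, Worker } from 'bullmq'

const connection = {
    host: process.env.REDIS_HOST ?? 'localhost',
    port: Number(process.env.REDIS_PORT ?? 6379)
}

const queueName = process.env.QUEUE_NAME ?? 'flowise-queue'

// Producer side: jobs are added here when MODE=queue
export const predictionQueue = new Queue(queueName, {
    connection,
    // Assumed mapping of QUEUE_REDIS_EVENT_STREAM_MAX_LEN to BullMQ's event stream cap
    streams: { events: { maxLen: Number(process.env.QUEUE_REDIS_EVENT_STREAM_MAX_LEN ?? 100000) } }
})

// Worker side: a separate process consumes jobs from the same queue
export const predictionWorker = new Worker(
    queueName,
    async (job) => {
        // Placeholder processor; a real worker would execute the flow described in job.data
        return { processed: job.name }
    },
    { connection }
)
```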
Read [more](https://github.com/FlowiseAI/Flowise/blob/main/CONTRIBUTING.md#-env-variables) diff --git a/packages/server/cypress/e2e/1-apikey/apikey.cy.js b/packages/server/cypress/e2e/1-apikey/apikey.cy.js index 2ce7a2bbc10..06576be881d 100644 --- a/packages/server/cypress/e2e/1-apikey/apikey.cy.js +++ b/packages/server/cypress/e2e/1-apikey/apikey.cy.js @@ -1,3 +1,6 @@ +/* +* TODO: Disabling for now as we need to enable login first +* describe('E2E suite for api/v1/apikey API endpoint', () => { beforeEach(() => { cy.visit('http://localhost:3000/apikey') @@ -43,3 +46,4 @@ describe('E2E suite for api/v1/apikey API endpoint', () => { cy.get('table.MuiTable-root tbody tr').should('have.length', 1) }) }) +*/ diff --git a/packages/server/cypress/e2e/2-variables/variables.cy.js b/packages/server/cypress/e2e/2-variables/variables.cy.js index bcb1ec5c43e..f2b8c340112 100644 --- a/packages/server/cypress/e2e/2-variables/variables.cy.js +++ b/packages/server/cypress/e2e/2-variables/variables.cy.js @@ -1,3 +1,6 @@ +/* +* TODO: Disabling for now as we need to enable login first +* describe('E2E suite for api/v1/variables API endpoint', () => { beforeEach(() => { cy.visit('http://localhost:3000/variables') @@ -9,7 +12,7 @@ describe('E2E suite for api/v1/variables API endpoint', () => { }) // CREATE - it('can add new variable', () => { + it.skip('can add new variable', () => { const newVariableName = 'MafiVariable' const newVariableValue = 'shh!!! secret value' cy.get('#btn_createVariable').click() @@ -21,14 +24,14 @@ describe('E2E suite for api/v1/variables API endpoint', () => { }) // READ - it('can retrieve all api keys', () => { + it.skip('can retrieve all api keys', () => { const newVariableName = 'MafiVariable' cy.get('.MuiTable-root tbody tr').should('have.length', 1) cy.get('.MuiTable-root tbody tr').last().find('th').first().find('div').first().should('have.text', newVariableName) }) // UPDATE - it('can update new api key', () => { + it.skip('can update new api key', () => { const updatedVariableName = 'PichiVariable' const updatedVariableValue = 'silence shh! 
value' cy.get('.MuiTable-root tbody tr').last().find('td').eq(4).find('button').click() @@ -40,10 +43,11 @@ describe('E2E suite for api/v1/variables API endpoint', () => { }) // DELETE - it('can delete new api key', () => { + it.skip('can delete new api key', () => { cy.get('.MuiTable-root tbody tr').last().find('td').eq(5).find('button').click() cy.get('.MuiDialog-scrollPaper .MuiDialogActions-spacing button').last().click() cy.get('.MuiTable-root tbody tr').should('have.length', 0) cy.get('.MuiCardContent-root .MuiStack-root').last().find('div').last().should('have.text', 'No Variables Yet') }) }) +*/ diff --git a/packages/server/gulpfile.ts b/packages/server/gulpfile.ts new file mode 100644 index 00000000000..8de915dda44 --- /dev/null +++ b/packages/server/gulpfile.ts @@ -0,0 +1,7 @@ +import { dest, src } from 'gulp' + +function copyEmailTemplates() { + return src(['src/enterprise/emails/*.hbs']).pipe(dest('dist/enterprise/emails')) +} + +exports.default = copyEmailTemplates diff --git a/packages/server/jest.config.js b/packages/server/jest.config.js new file mode 100644 index 00000000000..17a5e4ce19a --- /dev/null +++ b/packages/server/jest.config.js @@ -0,0 +1,23 @@ +module.exports = { + // Use ts-jest preset for testing TypeScript files with Jest + preset: 'ts-jest', + // Set the test environment to Node.js + testEnvironment: 'node', + + // Define the root directory for tests and modules + roots: ['<rootDir>/test'], + + // Use ts-jest to transform TypeScript files + transform: { + '^.+\\.tsx?$': 'ts-jest' + }, + + // Regular expression to find test files + testRegex: '((\\.|/)index\\.test)\\.tsx?$', + + // File extensions to recognize in module resolution + moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx', 'json', 'node'], + + // Display individual test results with the test suite hierarchy.
+ verbose: true +} diff --git a/packages/server/marketplaces/agentflowsv2/Agentic RAG V2.json b/packages/server/marketplaces/agentflowsv2/Agentic RAG V2.json index 343307f1206..c37df8a955c 100644 --- a/packages/server/marketplaces/agentflowsv2/Agentic RAG V2.json +++ b/packages/server/marketplaces/agentflowsv2/Agentic RAG V2.json @@ -12,7 +12,7 @@ "data": { "id": "startAgentflow_0", "label": "Start", - "version": 1, + "version": 1.1, "name": "startAgentflow", "type": "Start", "color": "#7EE787", @@ -157,6 +157,15 @@ ], "id": "startAgentflow_0-input-startState-array", "display": true + }, + { + "label": "Persist State", + "name": "startPersistState", + "type": "boolean", + "description": "Persist the state in the same session", + "optional": true, + "id": "startAgentflow_0-input-startPersistState-boolean", + "display": true } ], "inputAnchors": [], diff --git a/packages/server/marketplaces/agentflowsv2/Agents Handoff.json b/packages/server/marketplaces/agentflowsv2/Agents Handoff.json index 72b4da96989..31d74d71c77 100644 --- a/packages/server/marketplaces/agentflowsv2/Agents Handoff.json +++ b/packages/server/marketplaces/agentflowsv2/Agents Handoff.json @@ -12,7 +12,7 @@ "data": { "id": "startAgentflow_0", "label": "Start", - "version": 1, + "version": 1.1, "name": "startAgentflow", "type": "Start", "color": "#7EE787", @@ -157,6 +157,15 @@ ], "id": "startAgentflow_0-input-startState-array", "display": true + }, + { + "label": "Persist State", + "name": "startPersistState", + "type": "boolean", + "description": "Persist the state in the same session", + "optional": true, + "id": "startAgentflow_0-input-startPersistState-boolean", + "display": true } ], "inputAnchors": [], diff --git a/packages/server/marketplaces/agentflowsv2/Deep Research V2.json b/packages/server/marketplaces/agentflowsv2/Deep Research V2.json index bd58656b844..bad7e2efded 100644 --- a/packages/server/marketplaces/agentflowsv2/Deep Research V2.json +++ b/packages/server/marketplaces/agentflowsv2/Deep Research V2.json @@ -12,7 +12,7 @@ "data": { "id": "startAgentflow_0", "label": "Start", - "version": 1, + "version": 1.1, "name": "startAgentflow", "type": "Start", "color": "#7EE787", @@ -160,6 +160,15 @@ ], "id": "startAgentflow_0-input-startState-array", "display": true + }, + { + "label": "Persist State", + "name": "startPersistState", + "type": "boolean", + "description": "Persist the state in the same session", + "optional": true, + "id": "startAgentflow_0-input-startPersistState-boolean", + "display": true } ], "inputAnchors": [], diff --git a/packages/server/marketplaces/agentflowsv2/Email Reply HITL Agent.json b/packages/server/marketplaces/agentflowsv2/Email Reply HITL Agent.json index 0174b1a90f3..2ab7e4d9282 100644 --- a/packages/server/marketplaces/agentflowsv2/Email Reply HITL Agent.json +++ b/packages/server/marketplaces/agentflowsv2/Email Reply HITL Agent.json @@ -12,7 +12,7 @@ "data": { "id": "startAgentflow_0", "label": "Start", - "version": 1, + "version": 1.1, "name": "startAgentflow", "type": "Start", "color": "#7EE787", @@ -157,6 +157,15 @@ ], "id": "startAgentflow_0-input-startState-array", "display": true + }, + { + "label": "Persist State", + "name": "startPersistState", + "type": "boolean", + "description": "Persist the state in the same session", + "optional": true, + "id": "startAgentflow_0-input-startPersistState-boolean", + "display": true } ], "inputAnchors": [], diff --git a/packages/server/marketplaces/agentflowsv2/Financial Research Agent.json 
b/packages/server/marketplaces/agentflowsv2/Financial Research Agent.json index f72312e7510..ac4dab53f31 100644 --- a/packages/server/marketplaces/agentflowsv2/Financial Research Agent.json +++ b/packages/server/marketplaces/agentflowsv2/Financial Research Agent.json @@ -12,7 +12,7 @@ "data": { "id": "startAgentflow_0", "label": "Start", - "version": 1, + "version": 1.1, "name": "startAgentflow", "type": "Start", "color": "#7EE787", @@ -157,6 +157,15 @@ ], "id": "startAgentflow_0-input-startState-array", "display": true + }, + { + "label": "Persist State", + "name": "startPersistState", + "type": "boolean", + "description": "Persist the state in the same session", + "optional": true, + "id": "startAgentflow_0-input-startPersistState-boolean", + "display": true } ], "inputAnchors": [], diff --git a/packages/server/marketplaces/agentflowsv2/Iterations.json b/packages/server/marketplaces/agentflowsv2/Iterations.json index b33dd1a544d..39eb72bd61a 100644 --- a/packages/server/marketplaces/agentflowsv2/Iterations.json +++ b/packages/server/marketplaces/agentflowsv2/Iterations.json @@ -12,7 +12,7 @@ "data": { "id": "startAgentflow_0", "label": "Start", - "version": 1, + "version": 1.1, "name": "startAgentflow", "type": "Start", "color": "#7EE787", @@ -157,6 +157,15 @@ ], "id": "startAgentflow_0-input-startState-array", "display": true + }, + { + "label": "Persist State", + "name": "startPersistState", + "type": "boolean", + "description": "Persist the state in the same session", + "optional": true, + "id": "startAgentflow_0-input-startPersistState-boolean", + "display": true } ], "inputAnchors": [], diff --git a/packages/server/marketplaces/agentflowsv2/Slack Agent.json b/packages/server/marketplaces/agentflowsv2/Slack Agent.json index cd30db6461f..ec95fb4b9fc 100644 --- a/packages/server/marketplaces/agentflowsv2/Slack Agent.json +++ b/packages/server/marketplaces/agentflowsv2/Slack Agent.json @@ -12,7 +12,7 @@ "data": { "id": "startAgentflow_0", "label": "Start", - "version": 1, + "version": 1.1, "name": "startAgentflow", "type": "Start", "color": "#7EE787", @@ -157,6 +157,15 @@ ], "id": "startAgentflow_0-input-startState-array", "display": true + }, + { + "label": "Persist State", + "name": "startPersistState", + "type": "boolean", + "description": "Persist the state in the same session", + "optional": true, + "id": "startAgentflow_0-input-startPersistState-boolean", + "display": true } ], "inputAnchors": [], diff --git a/packages/server/marketplaces/agentflowsv2/Supervisor Worker.json b/packages/server/marketplaces/agentflowsv2/Supervisor Worker.json index dbf60b3353f..1773ce6a865 100644 --- a/packages/server/marketplaces/agentflowsv2/Supervisor Worker.json +++ b/packages/server/marketplaces/agentflowsv2/Supervisor Worker.json @@ -12,7 +12,7 @@ "data": { "id": "startAgentflow_0", "label": "Start", - "version": 1, + "version": 1.1, "name": "startAgentflow", "type": "Start", "color": "#7EE787", @@ -157,6 +157,15 @@ ], "id": "startAgentflow_0-input-startState-array", "display": true + }, + { + "label": "Persist State", + "name": "startPersistState", + "type": "boolean", + "description": "Persist the state in the same session", + "optional": true, + "id": "startAgentflow_0-input-startPersistState-boolean", + "display": true } ], "inputAnchors": [], diff --git a/packages/server/package.json b/packages/server/package.json index 2075f39494c..5ebe8318eca 100644 --- a/packages/server/package.json +++ b/packages/server/package.json @@ -1,6 +1,6 @@ { "name": "flowise", - "version": "3.0.0", + 
"version": "3.0.1", "description": "Flowiseai Server", "main": "dist/index", "types": "dist/index.d.ts", @@ -20,7 +20,7 @@ "commands": "./dist/commands" }, "scripts": { - "build": "tsc", + "build": "tsc && rimraf dist/enterprise/emails && gulp", "start": "run-script-os", "clean": "rimraf dist", "nuke": "rimraf dist node_modules .turbo", @@ -37,13 +37,14 @@ "typeorm": "typeorm-ts-node-commonjs", "typeorm:migration-generate": "pnpm typeorm migration:generate -d ./src/utils/typeormDataSource.ts", "typeorm:migration-run": "pnpm typeorm migration:run -d ./src/utils/typeormDataSource.ts", + "typeorm:migration-revert": "pnpm typeorm migration:revert -d ./src/utils/typeormDataSource.ts", "watch": "tsc --watch", "version": "oclif readme && git add README.md", "cypress:open": "cypress open", "cypress:run": "cypress run", "e2e": "start-server-and-test dev http://localhost:3000 cypress:run", "cypress:ci": "START_SERVER_AND_TEST_INSECURE=1 start-server-and-test start https-get://localhost:3000 cypress:run", - "test": "jest" + "test": "jest --runInBand --detectOpenHandles --forceExit" }, "keywords": [], "homepage": "https://flowiseai.com", @@ -58,6 +59,7 @@ "dependencies": { "@aws-sdk/client-secrets-manager": "^3.699.0", "@google-cloud/logging-winston": "^6.0.0", + "@keyv/redis": "^4.2.0", "@oclif/core": "4.0.7", "@opentelemetry/api": "^1.3.0", "@opentelemetry/auto-instrumentations-node": "^0.52.0", @@ -73,24 +75,42 @@ "@opentelemetry/sdk-node": "^0.54.0", "@opentelemetry/sdk-trace-base": "1.27.0", "@opentelemetry/semantic-conventions": "1.27.0", + "@types/bcryptjs": "^2.4.6", "@types/lodash": "^4.14.202", + "@types/passport": "^1.0.16", + "@types/passport-jwt": "^4.0.1", + "@types/passport-local": "^1.0.38", "@types/uuid": "^9.0.7", "async-mutex": "^0.4.0", "axios": "1.7.9", + "bcryptjs": "^2.4.3", "bull-board": "^2.1.3", - "bullmq": "^5.42.0", + "bullmq": "5.45.2", + "cache-manager": "^6.3.2", + "connect-pg-simple": "^10.0.0", + "connect-redis": "^8.0.1", + "connect-sqlite3": "^0.9.15", "content-disposition": "0.5.4", + "cookie-parser": "^1.4.6", "cors": "^2.8.5", "crypto-js": "^4.1.1", + "csv-parser": "^3.0.0", "dotenv": "^16.0.0", "express": "^4.17.3", "express-basic-auth": "^1.2.1", + "express-mysql-session": "^3.0.3", "express-rate-limit": "^6.9.0", + "express-session": "^1.18.1", "flowise-components": "workspace:^", + "flowise-nim-container-manager": "^1.0.11", "flowise-ui": "workspace:^", "global-agent": "^3.0.0", + "gulp": "^4.0.2", + "handlebars": "^4.7.8", "http-errors": "^2.0.0", "http-status-codes": "^2.3.0", + "jsonwebtoken": "^9.0.2", + "jwt-decode": "^4.0.0", "langchainhub": "^0.0.11", "lodash": "^4.17.21", "moment": "^2.29.3", @@ -99,8 +119,17 @@ "multer-cloud-storage": "^4.0.0", "multer-s3": "^3.0.1", "mysql2": "^3.11.3", - "flowise-nim-container-manager": "^1.0.11", + "nanoid": "3", + "nodemailer": "^6.9.14", "openai": "^4.96.0", + "passport": "^0.7.0", + "passport-auth0": "^1.4.4", + "passport-cookie": "^1.0.9", + "passport-github": "^1.1.0", + "passport-google-oauth20": "^2.0.0", + "passport-jwt": "^4.0.1", + "passport-local": "^1.0.0", + "passport-openidconnect": "^0.1.2", "pg": "^8.11.1", "posthog-node": "^3.5.0", "prom-client": "^15.1.3", @@ -109,27 +138,41 @@ "s3-streamlogger": "^1.11.0", "sanitize-html": "^2.11.0", "sqlite3": "^5.1.6", + "stripe": "^15.6.0", "turndown": "^7.2.0", "typeorm": "^0.3.6", "uuid": "^9.0.1", - "winston": "^3.9.0" + "winston": "^3.9.0", + "winston-daily-rotate-file": "^5.0.0" }, "devDependencies": { "@types/content-disposition": "0.5.8", + 
"@types/cookie-parser": "^1.4.7", "@types/cors": "^2.8.12", "@types/crypto-js": "^4.1.1", + "@types/express-session": "^1.18.0", + "@types/jest": "^29.5.14", + "@types/jsonwebtoken": "^9.0.6", "@types/multer": "^1.4.7", "@types/multer-s3": "^3.0.3", + "@types/nodemailer": "^6.4.15", + "@types/passport-auth0": "^1.0.9", + "@types/passport-github": "^1.1.12", + "@types/passport-openidconnect": "^0.1.3", "@types/sanitize-html": "^2.9.5", + "@types/supertest": "^6.0.3", "@types/turndown": "^5.0.5", "concurrently": "^7.1.0", "cypress": "^13.13.0", + "jest": "^29.7.0", "nodemon": "^2.0.22", "oclif": "^3", "rimraf": "^5.0.5", "run-script-os": "^1.1.6", "shx": "^0.3.3", "start-server-and-test": "^2.0.3", + "supertest": "^7.1.0", + "ts-jest": "^29.3.2", "ts-node": "^10.7.0", "tsc-watch": "^6.0.4", "typescript": "^5.4.5" diff --git a/packages/server/src/AppConfig.ts b/packages/server/src/AppConfig.ts index fa3919aaec0..8a838254e0a 100644 --- a/packages/server/src/AppConfig.ts +++ b/packages/server/src/AppConfig.ts @@ -1,7 +1,4 @@ export const appConfig = { - apiKeys: { - storageType: process.env.APIKEY_STORAGE_TYPE ? process.env.APIKEY_STORAGE_TYPE.toLowerCase() : 'json' - }, showCommunityNodes: process.env.SHOW_COMMUNITY_NODES ? process.env.SHOW_COMMUNITY_NODES.toLowerCase() === 'true' : false // todo: add more config options here like database, log, storage, credential and allow modification from UI } diff --git a/packages/server/src/DataSource.ts b/packages/server/src/DataSource.ts index 811f62b6aa5..f6e43ba7293 100644 --- a/packages/server/src/DataSource.ts +++ b/packages/server/src/DataSource.ts @@ -8,6 +8,7 @@ import { sqliteMigrations } from './database/migrations/sqlite' import { mysqlMigrations } from './database/migrations/mysql' import { mariadbMigrations } from './database/migrations/mariadb' import { postgresMigrations } from './database/migrations/postgres' +import logger from './utils/logger' let appDataSource: DataSource @@ -73,7 +74,17 @@ export const init = async (): Promise => { synchronize: false, migrationsRun: false, entities: Object.values(entities), - migrations: postgresMigrations + migrations: postgresMigrations, + extra: { + idleTimeoutMillis: 120000 + }, + logging: ['error', 'warn', 'info', 'log'], + logger: 'advanced-console', + logNotifications: true, + poolErrorHandler: (err) => { + logger.error(`Database pool error: ${JSON.stringify(err)}`) + }, + applicationName: 'Flowise' }) break default: @@ -97,7 +108,7 @@ export function getDataSource(): DataSource { return appDataSource } -const getDatabaseSSLFromEnv = () => { +export const getDatabaseSSLFromEnv = () => { if (process.env.DATABASE_SSL_KEY_BASE64) { return { rejectUnauthorized: false, diff --git a/packages/server/src/IdentityManager.ts b/packages/server/src/IdentityManager.ts new file mode 100644 index 00000000000..c56903be9c2 --- /dev/null +++ b/packages/server/src/IdentityManager.ts @@ -0,0 +1,524 @@ +/** + * Copyright (c) 2023-present FlowiseAI, Inc. + * + * The Enterprise and Cloud versions of Flowise are licensed under the [Commercial License](https://github.com/FlowiseAI/Flowise/tree/main/packages/server/src/enterprise/LICENSE.md). + * Unauthorized copying, modification, distribution, or use of the Enterprise and Cloud versions is strictly prohibited without a valid license agreement from FlowiseAI, Inc. 
+ * + * The Open Source version is licensed under the Apache License, Version 2.0 (the "License") + * + * For information about licensing of the Enterprise and Cloud versions, please contact: + * security@flowiseai.com + */ + +import axios from 'axios' +import express, { Application, NextFunction, Request, Response } from 'express' +import * as fs from 'fs' +import { StatusCodes } from 'http-status-codes' +import jwt from 'jsonwebtoken' +import path from 'path' +import { LoginMethodStatus } from './enterprise/database/entities/login-method.entity' +import { ErrorMessage, LoggedInUser } from './enterprise/Interface.Enterprise' +import { Permissions } from './enterprise/rbac/Permissions' +import { LoginMethodService } from './enterprise/services/login-method.service' +import { OrganizationService } from './enterprise/services/organization.service' +import Auth0SSO from './enterprise/sso/Auth0SSO' +import AzureSSO from './enterprise/sso/AzureSSO' +import GithubSSO from './enterprise/sso/GithubSSO' +import GoogleSSO from './enterprise/sso/GoogleSSO' +import SSOBase from './enterprise/sso/SSOBase' +import { InternalFlowiseError } from './errors/internalFlowiseError' +import { Platform, UserPlan } from './Interface' +import { StripeManager } from './StripeManager' +import { UsageCacheManager } from './UsageCacheManager' +import { GeneralErrorMessage, LICENSE_QUOTAS } from './utils/constants' +import { getRunningExpressApp } from './utils/getRunningExpressApp' +import { ENTERPRISE_FEATURE_FLAGS } from './utils/quotaUsage' +import Stripe from 'stripe' + +const allSSOProviders = ['azure', 'google', 'auth0', 'github'] +export class IdentityManager { + private static instance: IdentityManager + private stripeManager?: StripeManager + licenseValid: boolean = false + permissions: Permissions + ssoProviderName: string = '' + currentInstancePlatform: Platform = Platform.OPEN_SOURCE + // create a map to store the sso provider name and the sso provider instance + ssoProviders: Map = new Map() + + public static async getInstance(): Promise { + if (!IdentityManager.instance) { + IdentityManager.instance = new IdentityManager() + await IdentityManager.instance.initialize() + } + return IdentityManager.instance + } + + public async initialize() { + await this._validateLicenseKey() + this.permissions = new Permissions() + if (process.env.STRIPE_SECRET_KEY) { + this.stripeManager = await StripeManager.getInstance() + } + } + + public getPlatformType = () => { + return this.currentInstancePlatform + } + + public getPermissions = () => { + return this.permissions + } + + public isEnterprise = () => { + return this.currentInstancePlatform === Platform.ENTERPRISE + } + + public isCloud = () => { + return this.currentInstancePlatform === Platform.CLOUD + } + + public isOpenSource = () => { + return this.currentInstancePlatform === Platform.OPEN_SOURCE + } + + public isLicenseValid = () => { + return this.licenseValid + } + + private _offlineVerifyLicense(licenseKey: string): any { + try { + const publicKey = fs.readFileSync(path.join(__dirname, '../', 'src/enterprise/license/public.pem'), 'utf8') + const decoded = jwt.verify(licenseKey, publicKey, { + algorithms: ['RS256'] + }) + return decoded + } catch (error) { + console.error('Error verifying license key:', error) + return null + } + } + + private _validateLicenseKey = async () => { + const LICENSE_URL = process.env.LICENSE_URL + const FLOWISE_EE_LICENSE_KEY = process.env.FLOWISE_EE_LICENSE_KEY + + // First check if license key is missing + if 
(!FLOWISE_EE_LICENSE_KEY) { + this.licenseValid = false + this.currentInstancePlatform = Platform.OPEN_SOURCE + return + } + + try { + if (process.env.OFFLINE === 'true') { + const decodedLicense = this._offlineVerifyLicense(FLOWISE_EE_LICENSE_KEY) + + if (!decodedLicense) { + this.licenseValid = false + } else { + const issuedAtSeconds = decodedLicense.iat + if (!issuedAtSeconds) { + this.licenseValid = false + } else { + const issuedAt = new Date(issuedAtSeconds * 1000) + const expiryDurationInMonths = decodedLicense.expiryDurationInMonths || 0 + + const expiryDate = new Date(issuedAt) + expiryDate.setMonth(expiryDate.getMonth() + expiryDurationInMonths) + + if (new Date() > expiryDate) { + this.licenseValid = false + } else { + this.licenseValid = true + } + } + } + this.currentInstancePlatform = Platform.ENTERPRISE + } else if (LICENSE_URL) { + try { + const response = await axios.post(`${LICENSE_URL}/enterprise/verify`, { license: FLOWISE_EE_LICENSE_KEY }) + this.licenseValid = response.data?.valid + + if (!LICENSE_URL.includes('api')) this.currentInstancePlatform = Platform.ENTERPRISE + else if (LICENSE_URL.includes('v1')) this.currentInstancePlatform = Platform.ENTERPRISE + else if (LICENSE_URL.includes('v2')) this.currentInstancePlatform = response.data?.platform + else throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, GeneralErrorMessage.UNHANDLED_EDGE_CASE) + } catch (error) { + console.error('Error verifying license key:', error) + this.licenseValid = false + this.currentInstancePlatform = Platform.ENTERPRISE + return + } + } + } catch (error) { + this.licenseValid = false + } + } + + public initializeSSO = async (app: express.Application) => { + if (this.getPlatformType() === Platform.CLOUD || this.getPlatformType() === Platform.ENTERPRISE) { + const loginMethodService = new LoginMethodService() + let queryRunner + try { + queryRunner = getRunningExpressApp().AppDataSource.createQueryRunner() + await queryRunner.connect() + let organizationId = undefined + if (this.getPlatformType() === Platform.ENTERPRISE) { + const organizationService = new OrganizationService() + const organizations = await organizationService.readOrganization(queryRunner) + if (organizations.length > 0) { + organizationId = organizations[0].id + } else { + this.initializeEmptySSO(app) + return + } + } + const loginMethods = await loginMethodService.readLoginMethodByOrganizationId(organizationId, queryRunner) + if (loginMethods && loginMethods.length > 0) { + for (let method of loginMethods) { + if (method.status === LoginMethodStatus.ENABLE) { + method.config = JSON.parse(await loginMethodService.decryptLoginMethodConfig(method.config)) + this.initializeSsoProvider(app, method.name, method.config) + } + } + } + } finally { + if (queryRunner) await queryRunner.release() + } + } + // iterate through the remaining providers and initialize them with configEnabled as false + this.initializeEmptySSO(app) + } + + initializeEmptySSO(app: Application) { + allSSOProviders.map((providerName) => { + if (!this.ssoProviders.has(providerName)) { + this.initializeSsoProvider(app, providerName, undefined) + } + }) + } + + initializeSsoProvider(app: Application, providerName: string, providerConfig: any) { + if (this.ssoProviders.has(providerName)) { + const provider = this.ssoProviders.get(providerName) + if (provider) { + if (providerConfig && providerConfig.configEnabled === true) { + provider.setSSOConfig(providerConfig) + provider.initialize() + } else { + // if false, disable the provider + 
provider.setSSOConfig(undefined) + } + } + } else { + switch (providerName) { + case 'azure': { + const azureSSO = new AzureSSO(app, providerConfig) + azureSSO.initialize() + this.ssoProviders.set(providerName, azureSSO) + break + } + case 'google': { + const googleSSO = new GoogleSSO(app, providerConfig) + googleSSO.initialize() + this.ssoProviders.set(providerName, googleSSO) + break + } + case 'auth0': { + const auth0SSO = new Auth0SSO(app, providerConfig) + auth0SSO.initialize() + this.ssoProviders.set(providerName, auth0SSO) + break + } + case 'github': { + const githubSSO = new GithubSSO(app, providerConfig) + githubSSO.initialize() + this.ssoProviders.set(providerName, githubSSO) + break + } + default: + throw new Error(`SSO Provider ${providerName} not found`) + } + } + } + + async getRefreshToken(providerName: any, ssoRefreshToken: string) { + if (!this.ssoProviders.has(providerName)) { + throw new Error(`SSO Provider ${providerName} not found`) + } + return await (this.ssoProviders.get(providerName) as SSOBase).refreshToken(ssoRefreshToken) + } + + public async getProductIdFromSubscription(subscriptionId: string) { + if (!subscriptionId) return '' + if (!this.stripeManager) { + throw new Error('Stripe manager is not initialized') + } + return await this.stripeManager.getProductIdFromSubscription(subscriptionId) + } + + public async getFeaturesByPlan(subscriptionId: string, withoutCache: boolean = false) { + if (this.isEnterprise()) { + const features: Record = {} + for (const feature of ENTERPRISE_FEATURE_FLAGS) { + features[feature] = 'true' + } + return features + } else if (this.isCloud()) { + if (!this.stripeManager || !subscriptionId) { + return {} + } + return await this.stripeManager.getFeaturesByPlan(subscriptionId, withoutCache) + } + return {} + } + + public static checkFeatureByPlan(feature: string) { + return (req: Request, res: Response, next: NextFunction) => { + const user = req.user + if (user) { + if (!user.features || Object.keys(user.features).length === 0) { + return res.status(403).json({ message: ErrorMessage.FORBIDDEN }) + } + if (Object.keys(user.features).includes(feature) && user.features[feature] === 'true') { + return next() + } + } + return res.status(403).json({ message: ErrorMessage.FORBIDDEN }) + } + } + + public async createStripeCustomerPortalSession(req: Request) { + if (!this.stripeManager) { + throw new Error('Stripe manager is not initialized') + } + return await this.stripeManager.createStripeCustomerPortalSession(req) + } + + public async getAdditionalSeatsQuantity(subscriptionId: string) { + if (!subscriptionId) return {} + if (!this.stripeManager) { + throw new Error('Stripe manager is not initialized') + } + return await this.stripeManager.getAdditionalSeatsQuantity(subscriptionId) + } + + public async getCustomerWithDefaultSource(customerId: string) { + if (!customerId) return + if (!this.stripeManager) { + throw new Error('Stripe manager is not initialized') + } + return await this.stripeManager.getCustomerWithDefaultSource(customerId) + } + + public async getAdditionalSeatsProration(subscriptionId: string, newQuantity: number) { + if (!subscriptionId) return {} + if (!this.stripeManager) { + throw new Error('Stripe manager is not initialized') + } + return await this.stripeManager.getAdditionalSeatsProration(subscriptionId, newQuantity) + } + + public async updateAdditionalSeats(subscriptionId: string, quantity: number, prorationDate: number) { + if (!subscriptionId) return {} + + if (!this.stripeManager) { + throw new Error('Stripe 
manager is not initialized') + } + const { success, subscription, invoice } = await this.stripeManager.updateAdditionalSeats(subscriptionId, quantity, prorationDate) + + // Fetch product details to get quotas + const items = subscription.items.data + if (items.length === 0) { + throw new Error('No subscription items found') + } + + const productId = items[0].price.product as string + const product = await this.stripeManager.getStripe().products.retrieve(productId) + const productMetadata = product.metadata + + // Extract quotas from metadata + const quotas: Record = {} + for (const key in productMetadata) { + if (key.startsWith('quota:')) { + quotas[key] = parseInt(productMetadata[key]) + } + } + quotas[LICENSE_QUOTAS.ADDITIONAL_SEATS_LIMIT] = quantity + + // Get features from Stripe + const features = await this.getFeaturesByPlan(subscription.id, true) + + // Update the cache with new subscription data including quotas + const cacheManager = await UsageCacheManager.getInstance() + await cacheManager.updateSubscriptionDataToCache(subscriptionId, { + features, + quotas, + subsriptionDetails: this.stripeManager.getSubscriptionObject(subscription) + }) + + return { success, subscription, invoice } + } + + public async getPlanProration(subscriptionId: string, newPlanId: string) { + if (!subscriptionId || !newPlanId) return {} + + if (!this.stripeManager) { + throw new Error('Stripe manager is not initialized') + } + return await this.stripeManager.getPlanProration(subscriptionId, newPlanId) + } + + public async updateSubscriptionPlan(req: Request, subscriptionId: string, newPlanId: string, prorationDate: number) { + if (!subscriptionId || !newPlanId) return {} + + if (!this.stripeManager) { + throw new Error('Stripe manager is not initialized') + } + if (!req.user) { + throw new InternalFlowiseError(StatusCodes.UNAUTHORIZED, GeneralErrorMessage.UNAUTHORIZED) + } + const { success, subscription } = await this.stripeManager.updateSubscriptionPlan(subscriptionId, newPlanId, prorationDate) + if (success) { + // Fetch product details to get quotas + const product = await this.stripeManager.getStripe().products.retrieve(newPlanId) + const productMetadata = product.metadata + + // Extract quotas from metadata + const quotas: Record = {} + for (const key in productMetadata) { + if (key.startsWith('quota:')) { + quotas[key] = parseInt(productMetadata[key]) + } + } + + const additionalSeatsItem = subscription.items.data.find( + (item) => (item.price.product as string) === process.env.ADDITIONAL_SEAT_ID + ) + quotas[LICENSE_QUOTAS.ADDITIONAL_SEATS_LIMIT] = additionalSeatsItem?.quantity || 0 + + // Get features from Stripe + const features = await this.getFeaturesByPlan(subscription.id, true) + + // Update the cache with new subscription data including quotas + const cacheManager = await UsageCacheManager.getInstance() + + const updateCacheData: Record = { + features, + quotas, + subsriptionDetails: this.stripeManager.getSubscriptionObject(subscription) + } + + if ( + newPlanId === process.env.CLOUD_FREE_ID || + newPlanId === process.env.CLOUD_STARTER_ID || + newPlanId === process.env.CLOUD_PRO_ID + ) { + updateCacheData.productId = newPlanId + } + + await cacheManager.updateSubscriptionDataToCache(subscriptionId, updateCacheData) + + const loggedInUser: LoggedInUser = { + ...req.user, + activeOrganizationSubscriptionId: subscription.id, + features + } + + if ( + newPlanId === process.env.CLOUD_FREE_ID || + newPlanId === process.env.CLOUD_STARTER_ID || + newPlanId === process.env.CLOUD_PRO_ID + ) { + 
loggedInUser.activeOrganizationProductId = newPlanId + } + + req.user = { + ...req.user, + ...loggedInUser + } + + // Update passport session + // @ts-ignore + req.session.passport.user = { + ...req.user, + ...loggedInUser + } + + req.session.save((err) => { + if (err) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, GeneralErrorMessage.UNHANDLED_EDGE_CASE) + }) + + return { + status: 'success', + user: loggedInUser + } + } + return { + status: 'error', + message: 'Payment or subscription update not completed' + } + } + + public async createStripeUserAndSubscribe({ email, userPlan, referral }: { email: string; userPlan: UserPlan; referral?: string }) { + if (!this.stripeManager) { + throw new Error('Stripe manager is not initialized') + } + + try { + // Create a customer in Stripe + let customer: Stripe.Response + if (referral) { + customer = await this.stripeManager.getStripe().customers.create({ + email: email, + metadata: { + referral + } + }) + } else { + customer = await this.stripeManager.getStripe().customers.create({ + email: email + }) + } + + let productId = '' + switch (userPlan) { + case UserPlan.STARTER: + productId = process.env.CLOUD_STARTER_ID as string + break + case UserPlan.PRO: + productId = process.env.CLOUD_PRO_ID as string + break + case UserPlan.FREE: + productId = process.env.CLOUD_FREE_ID as string + break + } + + // Get the default price ID for the product + const prices = await this.stripeManager.getStripe().prices.list({ + product: productId, + active: true, + limit: 1 + }) + + if (!prices.data.length) { + throw new Error('No active price found for the product') + } + + // Create the subscription + const subscription = await this.stripeManager.getStripe().subscriptions.create({ + customer: customer.id, + items: [{ price: prices.data[0].id }] + }) + + return { + customerId: customer.id, + subscriptionId: subscription.id + } + } catch (error) { + console.error('Error creating Stripe user and subscription:', error) + throw error + } + } +} diff --git a/packages/server/src/Interface.DocumentStore.ts b/packages/server/src/Interface.DocumentStore.ts index 34d92978f75..93ec640cb7e 100644 --- a/packages/server/src/Interface.DocumentStore.ts +++ b/packages/server/src/Interface.DocumentStore.ts @@ -4,6 +4,7 @@ import { DataSource } from 'typeorm' import { IComponentNodes } from './Interface' import { Telemetry } from './utils/telemetry' import { CachePool } from './CachePool' +import { UsageCacheManager } from './UsageCacheManager' export enum DocumentStoreStatus { EMPTY_SYNC = 'EMPTY', @@ -27,6 +28,7 @@ export interface IDocumentStore { vectorStoreConfig: string | null // JSON string embeddingConfig: string | null // JSON string recordManagerConfig: string | null // JSON string + workspaceId?: string } export interface IDocumentStoreFileChunk { @@ -47,6 +49,7 @@ export interface IDocumentStoreFileChunkPagedResponse { storeName: string description: string docId: string + workspaceId?: string } export interface IDocumentStoreLoader { @@ -119,9 +122,13 @@ export interface IDocumentStoreWhereUsed { } export interface IUpsertQueueAppServer { + orgId: string + workspaceId: string + subscriptionId: string appDataSource: DataSource componentNodes: IComponentNodes telemetry: Telemetry + usageCacheManager: UsageCacheManager cachePool?: CachePool } @@ -231,6 +238,7 @@ export class DocumentStoreDTO { totalChunks: number totalChars: number chunkSize: number + workspaceId?: string loaders: IDocumentStoreLoader[] vectorStoreConfig: any embeddingConfig: any @@ -246,6 +254,7 @@ 
export class DocumentStoreDTO { documentStoreDTO.name = entity.name documentStoreDTO.description = entity.description documentStoreDTO.status = entity.status + documentStoreDTO.workspaceId = entity.workspaceId documentStoreDTO.totalChars = 0 documentStoreDTO.totalChunks = 0 diff --git a/packages/server/src/Interface.Evaluation.ts b/packages/server/src/Interface.Evaluation.ts new file mode 100644 index 00000000000..ad00151374b --- /dev/null +++ b/packages/server/src/Interface.Evaluation.ts @@ -0,0 +1,139 @@ +// Evaluation Related Interfaces +import { Evaluator } from './database/entities/Evaluator' + +export interface IDataset { + id: string + name: string + description: string + createdDate: Date + updatedDate: Date + workspaceId?: string +} +export interface IDatasetRow { + id: string + datasetId: string + input: string + output: string + updatedDate: Date + sequenceNo: number +} + +export enum EvaluationStatus { + PENDING = 'pending', + COMPLETED = 'completed', + ERROR = 'error' +} + +export interface IEvaluation { + id: string + name: string + chatflowId: string + chatflowName: string + datasetId: string + datasetName: string + evaluationType: string + additionalConfig: string //json + average_metrics: string //json + status: string + runDate: Date + workspaceId?: string +} + +export interface IEvaluationResult extends IEvaluation { + latestEval: boolean + version: number +} + +export interface IEvaluationRun { + id: string + evaluationId: string + input: string + expectedOutput: string + actualOutput: string // JSON + metrics: string // JSON + runDate: Date + llmEvaluators?: string // JSON + evaluators?: string // JSON + errors?: string // JSON +} + +export interface IEvaluator { + id: string + name: string + type: string + config: string // JSON + updatedDate: Date + createdDate: Date + workspaceId?: string +} + +export class EvaluatorDTO { + id: string + name: string + type: string + measure?: string + operator?: string + value?: string + prompt?: string + evaluatorType?: string + outputSchema?: [] + updatedDate: Date + createdDate: Date + + static toEntity(body: any): Evaluator { + const newDs = new Evaluator() + Object.assign(newDs, body) + let config: any = {} + if (body.type === 'llm') { + config = { + prompt: body.prompt, + outputSchema: body.outputSchema + } + } else if (body.type === 'text') { + config = { + operator: body.operator, + value: body.value + } + } else if (body.type === 'json') { + config = { + operator: body.operator + } + } else if (body.type === 'numeric') { + config = { + operator: body.operator, + value: body.value, + measure: body.measure + } + } else { + throw new Error('Invalid evaluator type') + } + newDs.config = JSON.stringify(config) + return newDs + } + + static fromEntity(entity: Evaluator): EvaluatorDTO { + const newDs = new EvaluatorDTO() + Object.assign(newDs, entity) + const config = JSON.parse(entity.config) + if (entity.type === 'llm') { + newDs.prompt = config.prompt + newDs.outputSchema = config.outputSchema + } else if (entity.type === 'text') { + newDs.operator = config.operator + newDs.value = config.value + } else if (entity.type === 'json') { + newDs.operator = config.operator + newDs.value = config.value + } else if (entity.type === 'numeric') { + newDs.operator = config.operator + newDs.value = config.value + newDs.measure = config.measure + } + delete (newDs as any).config + return newDs + } + + static fromEntities(entities: Evaluator[]): EvaluatorDTO[] { + return entities.map((entity) => this.fromEntity(entity)) + } +} diff --git 
a/packages/server/src/Interface.ts b/packages/server/src/Interface.ts index 7865727ce1e..e24c44c4524 100644 --- a/packages/server/src/Interface.ts +++ b/packages/server/src/Interface.ts @@ -12,6 +12,7 @@ import { import { DataSource } from 'typeorm' import { CachePool } from './CachePool' import { Telemetry } from './utils/telemetry' +import { UsageCacheManager } from './UsageCacheManager' export type MessageType = 'apiMessage' | 'userMessage' @@ -28,13 +29,27 @@ export enum MODE { export enum ChatType { INTERNAL = 'INTERNAL', - EXTERNAL = 'EXTERNAL' + EXTERNAL = 'EXTERNAL', + EVALUATION = 'EVALUATION' } export enum ChatMessageRatingType { THUMBS_UP = 'THUMBS_UP', THUMBS_DOWN = 'THUMBS_DOWN' } + +export enum Platform { + OPEN_SOURCE = 'open source', + CLOUD = 'cloud', + ENTERPRISE = 'enterprise' +} + +export enum UserPlan { + STARTER = 'STARTER', + PRO = 'PRO', + FREE = 'FREE' +} + /** * Databases */ @@ -54,6 +69,7 @@ export interface IChatFlow { apiConfig?: string category?: string type?: ChatflowType + workspaceId?: string } export interface IChatMessage { @@ -98,6 +114,7 @@ export interface ITool { func?: string updatedDate: Date createdDate: Date + workspaceId?: string } export interface IAssistant { @@ -107,6 +124,7 @@ export interface IAssistant { iconSrc?: string updatedDate: Date createdDate: Date + workspaceId?: string } export interface ICredential { @@ -116,6 +134,7 @@ export interface ICredential { encryptedData: string updatedDate: Date createdDate: Date + workspaceId?: string } export interface IVariable { @@ -125,6 +144,7 @@ export interface IVariable { type: string updatedDate: Date createdDate: Date + workspaceId?: string } export interface ILead { @@ -156,6 +176,7 @@ export interface IExecution { createdDate: Date updatedDate: Date stoppedDate: Date + workspaceId?: string } export interface IComponentNodes { @@ -311,6 +332,7 @@ export interface ICredentialReqBody { name: string credentialName: string plainDataObj: ICredentialDataDecrypted + workspaceId?: string } // Decrypted credential object sent back to client @@ -329,6 +351,7 @@ export interface IApiKey { apiKey: string apiSecret: string updatedDate: Date + workspaceId?: string } export interface ICustomTemplate { @@ -342,6 +365,7 @@ export interface ICustomTemplate { badge?: string framework?: string usecases?: string + workspaceId?: string } export interface IFlowConfig { @@ -361,14 +385,20 @@ export interface IPredictionQueueAppServer { sseStreamer: IServerSideEventStreamer telemetry: Telemetry cachePool: CachePool + usageCacheManager: UsageCacheManager } export interface IExecuteFlowParams extends IPredictionQueueAppServer { incomingInput: IncomingInput chatflow: IChatFlow chatId: string + orgId: string + workspaceId: string + subscriptionId: string baseURL: string isInternal: boolean + isEvaluation?: boolean + evaluationRunId?: string signal?: AbortController files?: Express.Multer.File[] fileUploads?: IFileUpload[] @@ -398,3 +428,6 @@ export interface IVariableOverride { // DocumentStore related export * from './Interface.DocumentStore' + +// Evaluations related +export * from './Interface.Evaluation' diff --git a/packages/server/src/StripeManager.ts b/packages/server/src/StripeManager.ts new file mode 100644 index 00000000000..278370ea1e8 --- /dev/null +++ b/packages/server/src/StripeManager.ts @@ -0,0 +1,606 @@ +import Stripe from 'stripe' +import { Request } from 'express' +import { UsageCacheManager } from './UsageCacheManager' +import { UserPlan } from './Interface' +import { LICENSE_QUOTAS } from 
'./utils/constants' + +export class StripeManager { + private static instance: StripeManager + private stripe?: Stripe + private cacheManager: UsageCacheManager + + public static async getInstance(): Promise { + if (!StripeManager.instance) { + StripeManager.instance = new StripeManager() + await StripeManager.instance.initialize() + } + return StripeManager.instance + } + + private async initialize() { + if (!this.stripe && process.env.STRIPE_SECRET_KEY) { + this.stripe = new Stripe(process.env.STRIPE_SECRET_KEY) + } + this.cacheManager = await UsageCacheManager.getInstance() + } + + public getStripe() { + if (!this.stripe) throw new Error('Stripe is not initialized') + return this.stripe + } + + public getSubscriptionObject(subscription: Stripe.Response) { + return { + customer: subscription.customer, + status: subscription.status, + created: subscription.created + } + } + + public async getProductIdFromSubscription(subscriptionId: string) { + if (!this.stripe) { + throw new Error('Stripe is not initialized') + } + + const subscriptionData = await this.cacheManager.getSubscriptionDataFromCache(subscriptionId) + if (subscriptionData?.productId) { + return subscriptionData.productId + } + + try { + const subscription = await this.stripe.subscriptions.retrieve(subscriptionId) + const items = subscription.items.data + if (items.length === 0) { + return '' + } + + const productId = items[0].price.product as string + await this.cacheManager.updateSubscriptionDataToCache(subscriptionId, { + productId, + subsriptionDetails: this.getSubscriptionObject(subscription) + }) + + return productId + } catch (error) { + console.error('Error getting product ID from subscription:', error) + throw error + } + } + + public async getFeaturesByPlan(subscriptionId: string, withoutCache: boolean = false) { + if (!this.stripe || !subscriptionId) { + return {} + } + + if (!withoutCache) { + const subscriptionData = await this.cacheManager.getSubscriptionDataFromCache(subscriptionId) + if (subscriptionData?.features) { + return subscriptionData.features + } + } + + const subscription = await this.stripe.subscriptions.retrieve(subscriptionId, { + timeout: 5000 + }) + const items = subscription.items.data + if (items.length === 0) { + return {} + } + + const productId = items[0].price.product as string + const product = await this.stripe.products.retrieve(productId, { + timeout: 5000 + }) + const productMetadata = product.metadata + + if (!productMetadata || Object.keys(productMetadata).length === 0) { + return {} + } + + const features: Record = {} + for (const key in productMetadata) { + if (key.startsWith('feat:')) { + features[key] = productMetadata[key] + } + } + + await this.cacheManager.updateSubscriptionDataToCache(subscriptionId, { + features, + subsriptionDetails: this.getSubscriptionObject(subscription) + }) + + return features + } + + public async createStripeCustomerPortalSession(req: Request) { + if (!this.stripe) { + throw new Error('Stripe is not initialized') + } + + const customerId = req.user?.activeOrganizationCustomerId + if (!customerId) { + throw new Error('Customer ID is required') + } + + const subscriptionId = req.user?.activeOrganizationSubscriptionId + if (!subscriptionId) { + throw new Error('Subscription ID is required') + } + + try { + const prodPriceIds = await this.getPriceIds() + const configuration = await this.createPortalConfiguration(prodPriceIds) + + const portalSession = await this.stripe.billingPortal.sessions.create({ + customer: customerId, + configuration: configuration.id, 
+ return_url: `${process.env.APP_URL}/account` + /* We can't have flow_data because it does not support multiple subscription items + flow_data: { + type: 'subscription_update', + subscription_update: { + subscription: subscriptionId + }, + after_completion: { + type: 'redirect', + redirect: { + return_url: `${process.env.APP_URL}/account/subscription?subscriptionId=${subscriptionId}` + } + } + }*/ + }) + + return { url: portalSession.url } + } catch (error) { + console.error('Error creating customer portal session:', error) + throw error + } + } + + private async getPriceIds() { + const prodPriceIds: Record = { + [UserPlan.STARTER]: { + product: process.env.CLOUD_STARTER_ID as string, + price: '' + }, + [UserPlan.PRO]: { + product: process.env.CLOUD_PRO_ID as string, + price: '' + }, + [UserPlan.FREE]: { + product: process.env.CLOUD_FREE_ID as string, + price: '' + }, + SEAT: { + product: process.env.ADDITIONAL_SEAT_ID as string, + price: '' + } + } + + for (const key in prodPriceIds) { + const prices = await this.stripe!.prices.list({ + product: prodPriceIds[key].product, + active: true, + limit: 1 + }) + + if (prices.data.length) { + prodPriceIds[key].price = prices.data[0].id + } + } + + return prodPriceIds + } + + private async createPortalConfiguration(_: Record) { + return await this.stripe!.billingPortal.configurations.create({ + business_profile: { + privacy_policy_url: `${process.env.APP_URL}/privacy-policy`, + terms_of_service_url: `${process.env.APP_URL}/terms-of-service` + }, + features: { + invoice_history: { + enabled: true + }, + payment_method_update: { + enabled: true + }, + subscription_cancel: { + enabled: false + } + /*subscription_update: { + enabled: false, + default_allowed_updates: ['price'], + products: [ + { + product: prodPriceIds[UserPlan.FREE].product, + prices: [prodPriceIds[UserPlan.FREE].price] + }, + { + product: prodPriceIds[UserPlan.STARTER].product, + prices: [prodPriceIds[UserPlan.STARTER].price] + }, + { + product: prodPriceIds[UserPlan.PRO].product, + prices: [prodPriceIds[UserPlan.PRO].price] + } + ], + proration_behavior: 'always_invoice' + }*/ + } + }) + } + + public async getAdditionalSeatsQuantity(subscriptionId: string): Promise<{ quantity: number; includedSeats: number }> { + if (!this.stripe) { + throw new Error('Stripe is not initialized') + } + + try { + const subscription = await this.stripe.subscriptions.retrieve(subscriptionId) + const additionalSeatsItem = subscription.items.data.find( + (item) => (item.price.product as string) === process.env.ADDITIONAL_SEAT_ID + ) + const quotas = await this.cacheManager.getQuotas(subscriptionId) + + return { quantity: additionalSeatsItem?.quantity || 0, includedSeats: quotas[LICENSE_QUOTAS.USERS_LIMIT] } + } catch (error) { + console.error('Error getting additional seats quantity:', error) + throw error + } + } + + public async getCustomerWithDefaultSource(customerId: string) { + if (!this.stripe) { + throw new Error('Stripe is not initialized') + } + + try { + const customer = (await this.stripe.customers.retrieve(customerId, { + expand: ['default_source', 'invoice_settings.default_payment_method'] + })) as Stripe.Customer + + return customer + } catch (error) { + console.error('Error retrieving customer with default source:', error) + throw error + } + } + + public async getAdditionalSeatsProration(subscriptionId: string, quantity: number) { + if (!this.stripe) { + throw new Error('Stripe is not initialized') + } + + try { + const subscription = await 
this.stripe.subscriptions.retrieve(subscriptionId) + + // Get customer's credit balance + const customer = await this.stripe.customers.retrieve(subscription.customer as string) + const creditBalance = (customer as Stripe.Customer).balance // Balance is in cents, negative for credit, positive for amount owed + + // Get the current subscription's base price (without seats) + const basePlanItem = subscription.items.data.find((item) => (item.price.product as string) !== process.env.ADDITIONAL_SEAT_ID) + const basePlanAmount = basePlanItem ? basePlanItem.price.unit_amount! * 1 : 0 + + const existingInvoice = await this.stripe.invoices.retrieveUpcoming({ + customer: subscription.customer as string, + subscription: subscriptionId + }) + + const existingInvoiceTotal = existingInvoice.total + + // Get the price ID for additional seats + const prices = await this.stripe.prices.list({ + product: process.env.ADDITIONAL_SEAT_ID, + active: true, + limit: 1 + }) + + if (prices.data.length === 0) { + throw new Error('No active price found for additional seats') + } + + const seatPrice = prices.data[0] + const pricePerSeat = seatPrice.unit_amount || 0 + + // Use current timestamp for proration calculation + const prorationDate = Math.floor(Date.now() / 1000) + + const additionalSeatsItem = subscription.items.data.find( + (item) => (item.price.product as string) === process.env.ADDITIONAL_SEAT_ID + ) + + const upcomingInvoice = await this.stripe.invoices.retrieveUpcoming({ + customer: subscription.customer as string, + subscription: subscriptionId, + subscription_details: { + proration_behavior: 'always_invoice', + proration_date: prorationDate, + items: [ + additionalSeatsItem + ? { + id: additionalSeatsItem.id, + quantity: quantity + } + : { + // If the item doesn't exist yet, create a new one + // This will be used to calculate the proration amount + price: prices.data[0].id, + quantity: quantity + } + ] + } + }) + + // Calculate proration amount from the relevant line items + // Only consider prorations that match our proration date + const prorationLineItems = upcomingInvoice.lines.data.filter( + (line) => line.type === 'invoiceitem' && line.period.start === prorationDate + ) + + const prorationAmount = prorationLineItems.reduce((total, item) => total + item.amount, 0) + + return { + basePlanAmount: basePlanAmount / 100, + additionalSeatsProratedAmount: (existingInvoiceTotal + prorationAmount - basePlanAmount) / 100, + seatPerUnitPrice: pricePerSeat / 100, + prorationAmount: prorationAmount / 100, + creditBalance: creditBalance / 100, + nextInvoiceTotal: (existingInvoiceTotal + prorationAmount) / 100, + currency: upcomingInvoice.currency.toUpperCase(), + prorationDate, + currentPeriodStart: subscription.current_period_start, + currentPeriodEnd: subscription.current_period_end + } + } catch (error) { + console.error('Error calculating additional seats proration:', error) + throw error + } + } + + public async updateAdditionalSeats(subscriptionId: string, quantity: number, prorationDate: number) { + if (!this.stripe) { + throw new Error('Stripe is not initialized') + } + + try { + const subscription = await this.stripe.subscriptions.retrieve(subscriptionId) + const additionalSeatsItem = subscription.items.data.find( + (item) => (item.price.product as string) === process.env.ADDITIONAL_SEAT_ID + ) + + // Get the price ID for additional seats if needed + const prices = await this.stripe.prices.list({ + product: process.env.ADDITIONAL_SEAT_ID, + active: true, + limit: 1 + }) + + if (prices.data.length === 
0) { + throw new Error('No active price found for additional seats') + } + + // Create an invoice immediately for the proration + const updatedSubscription = await this.stripe.subscriptions.update(subscriptionId, { + items: [ + additionalSeatsItem + ? { + id: additionalSeatsItem.id, + quantity: quantity + } + : { + price: prices.data[0].id, + quantity: quantity + } + ], + proration_behavior: 'always_invoice', + proration_date: prorationDate + }) + + // Get the latest invoice for this subscription + const invoice = await this.stripe.invoices.list({ + subscription: subscriptionId, + limit: 1 + }) + + if (invoice.data.length > 0) { + const latestInvoice = invoice.data[0] + // Only try to pay if the invoice is not already paid + if (latestInvoice.status !== 'paid') { + await this.stripe.invoices.pay(latestInvoice.id) + } + } + + return { + success: true, + subscription: updatedSubscription, + invoice: invoice.data[0] + } + } catch (error) { + console.error('Error updating additional seats:', error) + throw error + } + } + + public async getPlanProration(subscriptionId: string, newPlanId: string) { + if (!this.stripe) { + throw new Error('Stripe is not initialized') + } + + try { + const subscription = await this.stripe.subscriptions.retrieve(subscriptionId) + const customerId = subscription.customer as string + + // Get customer's credit balance and metadata + const customer = await this.stripe.customers.retrieve(customerId) + const creditBalance = (customer as Stripe.Customer).balance + const customerMetadata = (customer as Stripe.Customer).metadata || {} + + // Get the price ID for the new plan + const prices = await this.stripe.prices.list({ + product: newPlanId, + active: true, + limit: 1 + }) + + if (prices.data.length === 0) { + throw new Error('No active price found for the selected plan') + } + + const newPlan = prices.data[0] + const newPlanPrice = newPlan.unit_amount || 0 + + // Check if this is the STARTER plan and eligible for first month free + const isStarterPlan = newPlanId === process.env.CLOUD_STARTER_ID + const hasUsedFirstMonthFreeCoupon = customerMetadata.has_used_first_month_free === 'true' + const eligibleForFirstMonthFree = isStarterPlan && !hasUsedFirstMonthFreeCoupon + + // Use current timestamp for proration calculation + const prorationDate = Math.floor(Date.now() / 1000) + + const upcomingInvoice = await this.stripe.invoices.retrieveUpcoming({ + customer: customerId, + subscription: subscriptionId, + subscription_details: { + proration_behavior: 'always_invoice', + proration_date: prorationDate, + items: [ + { + id: subscription.items.data[0].id, + price: newPlan.id + } + ] + } + }) + + let prorationAmount = upcomingInvoice.lines.data.reduce((total, item) => total + item.amount, 0) + if (eligibleForFirstMonthFree) { + prorationAmount = 0 + } + + return { + newPlanAmount: newPlanPrice / 100, + prorationAmount: prorationAmount / 100, + creditBalance: creditBalance / 100, + currency: upcomingInvoice.currency.toUpperCase(), + prorationDate, + currentPeriodStart: subscription.current_period_start, + currentPeriodEnd: subscription.current_period_end, + eligibleForFirstMonthFree + } + } catch (error) { + console.error('Error calculating plan proration:', error) + throw error + } + } + + public async updateSubscriptionPlan(subscriptionId: string, newPlanId: string, prorationDate: number) { + if (!this.stripe) { + throw new Error('Stripe is not initialized') + } + + try { + const subscription = await this.stripe.subscriptions.retrieve(subscriptionId) + const customerId = 
subscription.customer as string
+
+ // Get customer details and metadata
+ const customer = await this.stripe.customers.retrieve(customerId)
+ const customerMetadata = (customer as Stripe.Customer).metadata || {}
+
+ // Get the price ID for the new plan
+ const prices = await this.stripe.prices.list({
+ product: newPlanId,
+ active: true,
+ limit: 1
+ })
+
+ if (prices.data.length === 0) {
+ throw new Error('No active price found for the selected plan')
+ }
+
+ const newPlan = prices.data[0]
+ let updatedSubscription: Stripe.Response<Stripe.Subscription>
+
+ // Check if this is an upgrade to CLOUD_STARTER_ID and eligible for first month free
+ const isStarterPlan = newPlanId === process.env.CLOUD_STARTER_ID
+ const hasUsedFirstMonthFreeCoupon = customerMetadata.has_used_first_month_free === 'true'
+
+ if (isStarterPlan && !hasUsedFirstMonthFreeCoupon) {
+ // Create the one-time 100% off coupon
+ const coupon = await this.stripe.coupons.create({
+ duration: 'once',
+ percent_off: 100,
+ max_redemptions: 1,
+ metadata: {
+ type: 'first_month_free',
+ customer_id: customerId,
+ plan_id: process.env.CLOUD_STARTER_ID || ''
+ }
+ })
+
+ // Create a promotion code linked to the coupon
+ const promotionCode = await this.stripe.promotionCodes.create({
+ coupon: coupon.id,
+ max_redemptions: 1
+ })
+
+ // Update the subscription with the new plan and apply the promotion code
+ updatedSubscription = await this.stripe.subscriptions.update(subscriptionId, {
+ items: [
+ {
+ id: subscription.items.data[0].id,
+ price: newPlan.id
+ }
+ ],
+ proration_behavior: 'always_invoice',
+ proration_date: prorationDate,
+ promotion_code: promotionCode.id
+ })
+
+ // Update customer metadata to mark the coupon as used
+ await this.stripe.customers.update(customerId, {
+ metadata: {
+ ...customerMetadata,
+ has_used_first_month_free: 'true',
+ first_month_free_date: new Date().toISOString()
+ }
+ })
+ } else {
+ // Regular plan update without coupon
+ updatedSubscription = await this.stripe.subscriptions.update(subscriptionId, {
+ items: [
+ {
+ id: subscription.items.data[0].id,
+ price: newPlan.id
+ }
+ ],
+ proration_behavior: 'always_invoice',
+ proration_date: prorationDate
+ })
+ }
+
+ // Get and pay the latest invoice
+ const invoice = await this.stripe.invoices.list({
+ subscription: subscriptionId,
+ limit: 1
+ })
+
+ if (invoice.data.length > 0) {
+ const latestInvoice = invoice.data[0]
+ if (latestInvoice.status !== 'paid') {
+ await this.stripe.invoices.pay(latestInvoice.id)
+ }
+ }
+
+ return {
+ success: true,
+ subscription: updatedSubscription,
+ invoice: invoice.data[0]
+ }
+ } catch (error) {
+ console.error('Error updating subscription plan:', error)
+ throw error
+ }
+ }
+}
diff --git a/packages/server/src/UsageCacheManager.ts b/packages/server/src/UsageCacheManager.ts
new file mode 100644
index 00000000000..583fe544578
--- /dev/null
+++ b/packages/server/src/UsageCacheManager.ts
@@ -0,0 +1,213 @@
+import { Keyv } from 'keyv'
+import KeyvRedis from '@keyv/redis'
+import { Cache, createCache } from 'cache-manager'
+import { MODE } from './Interface'
+import { LICENSE_QUOTAS } from './utils/constants'
+import { StripeManager } from './StripeManager'
+
+const DISABLED_QUOTAS = {
+ [LICENSE_QUOTAS.PREDICTIONS_LIMIT]: 0,
+ [LICENSE_QUOTAS.STORAGE_LIMIT]: 0, // in MB
+ [LICENSE_QUOTAS.FLOWS_LIMIT]: 0,
+ [LICENSE_QUOTAS.USERS_LIMIT]: 0,
+ [LICENSE_QUOTAS.ADDITIONAL_SEATS_LIMIT]: 0
+}
+
+const UNLIMITED_QUOTAS = {
+ [LICENSE_QUOTAS.PREDICTIONS_LIMIT]: -1,
+ [LICENSE_QUOTAS.STORAGE_LIMIT]: -1,
+ [LICENSE_QUOTAS.FLOWS_LIMIT]: -1,
+ [LICENSE_QUOTAS.USERS_LIMIT]: -1,
+ [LICENSE_QUOTAS.ADDITIONAL_SEATS_LIMIT]: -1
+}
+
+export class UsageCacheManager {
+ private cache: Cache
+ private static instance: UsageCacheManager
+
+ public static async getInstance(): Promise<UsageCacheManager> {
+ if (!UsageCacheManager.instance) {
+ UsageCacheManager.instance = new UsageCacheManager()
+ await UsageCacheManager.instance.initialize()
+ }
+ return UsageCacheManager.instance
+ }
+
+ private async initialize(): Promise<void> {
+ if (process.env.MODE === MODE.QUEUE) {
+ let redisConfig: string | Record<string, any>
+ if (process.env.REDIS_URL) {
+ redisConfig = process.env.REDIS_URL
+ } else {
+ redisConfig = {
+ username: process.env.REDIS_USERNAME || undefined,
+ password: process.env.REDIS_PASSWORD || undefined,
+ socket: {
+ host: process.env.REDIS_HOST || 'localhost',
+ port: parseInt(process.env.REDIS_PORT || '6379'),
+ tls: process.env.REDIS_TLS === 'true',
+ cert: process.env.REDIS_CERT ? Buffer.from(process.env.REDIS_CERT, 'base64') : undefined,
+ key: process.env.REDIS_KEY ? Buffer.from(process.env.REDIS_KEY, 'base64') : undefined,
+ ca: process.env.REDIS_CA ? Buffer.from(process.env.REDIS_CA, 'base64') : undefined
+ }
+ }
+ }
+ this.cache = createCache({
+ stores: [
+ new Keyv({
+ store: new KeyvRedis(redisConfig)
+ })
+ ]
+ })
+ } else {
+ this.cache = createCache()
+ }
+ }
+
+ public async getSubscriptionDetails(subscriptionId: string, withoutCache: boolean = false): Promise<Record<string, any>> {
+ const stripeManager = await StripeManager.getInstance()
+ if (!stripeManager || !subscriptionId) {
+ return UNLIMITED_QUOTAS
+ }
+
+ // Skip cache if withoutCache is true
+ if (!withoutCache) {
+ const subscriptionData = await this.getSubscriptionDataFromCache(subscriptionId)
+ if (subscriptionData?.subsriptionDetails) {
+ return subscriptionData.subsriptionDetails
+ }
+ }
+
+ // If not in cache, retrieve from Stripe
+ const subscription = await stripeManager.getStripe().subscriptions.retrieve(subscriptionId)
+
+ // Update subscription data cache
+ await this.updateSubscriptionDataToCache(subscriptionId, { subsriptionDetails: stripeManager.getSubscriptionObject(subscription) })
+
+ return stripeManager.getSubscriptionObject(subscription)
+ }
+
+ public async getQuotas(subscriptionId: string, withoutCache: boolean = false): Promise<Record<string, number>> {
+ const stripeManager = await StripeManager.getInstance()
+ if (!stripeManager || !subscriptionId) {
+ return UNLIMITED_QUOTAS
+ }
+
+ // Skip cache if withoutCache is true
+ if (!withoutCache) {
+ const subscriptionData = await this.getSubscriptionDataFromCache(subscriptionId)
+ if (subscriptionData?.quotas) {
+ return subscriptionData.quotas
+ }
+ }
+
+ // If not in cache, retrieve from Stripe
+ const subscription = await stripeManager.getStripe().subscriptions.retrieve(subscriptionId)
+ const items = subscription.items.data
+ if (items.length === 0) {
+ return DISABLED_QUOTAS
+ }
+
+ const productId = items[0].price.product as string
+ const product = await stripeManager.getStripe().products.retrieve(productId)
+ const productMetadata = product.metadata
+
+ if (!productMetadata || Object.keys(productMetadata).length === 0) {
+ return DISABLED_QUOTAS
+ }
+
+ const quotas: Record<string, number> = {}
+ for (const key in productMetadata) {
+ if (key.startsWith('quota:')) {
+ quotas[key] = parseInt(productMetadata[key])
+ }
+ }
+
+ const additionalSeatsItem = subscription.items.data.find(
+ (item) => (item.price.product as string) === process.env.ADDITIONAL_SEAT_ID
+ )
+ quotas[LICENSE_QUOTAS.ADDITIONAL_SEATS_LIMIT] = additionalSeatsItem?.quantity || 0
+
+ // Update subscription data cache with quotas
+ await this.updateSubscriptionDataToCache(subscriptionId, {
+ quotas,
+ subsriptionDetails: stripeManager.getSubscriptionObject(subscription)
+ })
+
+ return quotas
+ }
+
+ public async getSubscriptionDataFromCache(subscriptionId: string) {
+ const cacheKey = `subscription:${subscriptionId}`
+ return await this.get<{
+ quotas?: Record<string, number>
+ productId?: string
+ features?: Record<string, string>
+ subsriptionDetails?: Record<string, any>
+ }>(cacheKey)
+ }
+
+ public async updateSubscriptionDataToCache(
+ subscriptionId: string,
+ data: Partial<{
+ quotas: Record<string, number>
+ productId: string
+ features: Record<string, string>
+ subsriptionDetails: Record<string, any>
+ }>
+ ) {
+ const cacheKey = `subscription:${subscriptionId}`
+ const existingData = (await this.getSubscriptionDataFromCache(subscriptionId)) || {}
+ const updatedData = { ...existingData, ...data }
+ this.set(cacheKey, updatedData, 3600000) // Cache for 1 hour
+ }
+
+ public async get<T>(key: string): Promise<T | null> {
+ if (!this.cache) await this.initialize()
+ const value = await this.cache.get<T>(key)
+ return value
+ }
+
+ public async getTTL(key: string): Promise<number | null> {
+ if (!this.cache) await this.initialize()
+ const value = await this.cache.ttl(key)
+ return value
+ }
+
+ public async mget<T>(keys: string[]): Promise<(T | null)[]> {
+ if (this.cache) {
+ const values = await this.cache.mget<T>(keys)
+ return values
+ } else {
+ return []
+ }
+ }
+
+ public set<T>(key: string, value: T, ttl?: number) {
+ if (this.cache) {
+ this.cache.set(key, value, ttl)
+ }
+ }
+
+ public mset<T>(keys: [{ key: string; value: T; ttl: number }]) {
+ if (this.cache) {
+ this.cache.mset(keys)
+ }
+ }
+
+ public async del(key: string): Promise<void> {
+ await this.cache.del(key)
+ }
+
+ public async mdel(keys: string[]): Promise<void> {
+ await this.cache.mdel(keys)
+ }
+
+ public async clear(): Promise<void> {
+ await this.cache.clear()
+ }
+
+ public async wrap<T>(key: string, fn: () => Promise<T>, ttl?: number): Promise<T> {
+ return this.cache.wrap(key, fn, ttl)
+ }
+}
diff --git a/packages/server/src/commands/base.ts b/packages/server/src/commands/base.ts
index 9f0cd46f2e9..86222497bec 100644
--- a/packages/server/src/commands/base.ts
+++ b/packages/server/src/commands/base.ts
@@ -12,16 +12,12 @@ enum EXIT_CODE {
 export abstract class BaseCommand extends Command {
 static flags = {
- FLOWISE_USERNAME: Flags.string(),
- FLOWISE_PASSWORD: Flags.string(),
 FLOWISE_FILE_SIZE_LIMIT: Flags.string(),
 PORT: Flags.string(),
 CORS_ORIGINS: Flags.string(),
 IFRAME_ORIGINS: Flags.string(),
 DEBUG: Flags.string(),
 BLOB_STORAGE_PATH: Flags.string(),
- APIKEY_STORAGE_TYPE: Flags.string(),
- APIKEY_PATH: Flags.string(),
 LOG_PATH: Flags.string(),
 LOG_LEVEL: Flags.string(),
 TOOL_FUNCTION_BUILTIN_DEP: Flags.string(),
@@ -59,6 +55,7 @@ export abstract class BaseCommand extends Command {
 SECRETKEY_AWS_ACCESS_KEY: Flags.string(),
 SECRETKEY_AWS_SECRET_KEY: Flags.string(),
 SECRETKEY_AWS_REGION: Flags.string(),
+ SECRETKEY_AWS_NAME: Flags.string(),
 DISABLED_NODES: Flags.string(),
 MODE: Flags.string(),
 WORKER_CONCURRENCY: Flags.string(),
@@ -131,14 +128,6 @@ export abstract class BaseCommand extends Command {
 if (flags.NUMBER_OF_PROXIES) process.env.NUMBER_OF_PROXIES = flags.NUMBER_OF_PROXIES
 if (flags.SHOW_COMMUNITY_NODES) process.env.SHOW_COMMUNITY_NODES = flags.SHOW_COMMUNITY_NODES
 if (flags.DISABLED_NODES) process.env.DISABLED_NODES = flags.DISABLED_NODES
-
- // Authorization
- if (flags.FLOWISE_USERNAME) process.env.FLOWISE_USERNAME = flags.FLOWISE_USERNAME
- if (flags.FLOWISE_PASSWORD) process.env.FLOWISE_PASSWORD = flags.FLOWISE_PASSWORD
- if (flags.APIKEY_STORAGE_TYPE) process.env.APIKEY_STORAGE_TYPE = flags.APIKEY_STORAGE_TYPE
- if (flags.APIKEY_PATH) process.env.APIKEY_PATH = flags.APIKEY_PATH
-
- // API Configuration
 if (flags.FLOWISE_FILE_SIZE_LIMIT) process.env.FLOWISE_FILE_SIZE_LIMIT = flags.FLOWISE_FILE_SIZE_LIMIT
 // Credentials
@@ -148,6 +137,7 @@ export abstract class BaseCommand extends Command {
 if (flags.SECRETKEY_AWS_ACCESS_KEY) process.env.SECRETKEY_AWS_ACCESS_KEY = flags.SECRETKEY_AWS_ACCESS_KEY
 if (flags.SECRETKEY_AWS_SECRET_KEY) process.env.SECRETKEY_AWS_SECRET_KEY = flags.SECRETKEY_AWS_SECRET_KEY
 if (flags.SECRETKEY_AWS_REGION) process.env.SECRETKEY_AWS_REGION = flags.SECRETKEY_AWS_REGION
+ if (flags.SECRETKEY_AWS_NAME) process.env.SECRETKEY_AWS_NAME = flags.SECRETKEY_AWS_NAME
 // Logs
 if (flags.LOG_PATH) process.env.LOG_PATH = flags.LOG_PATH
diff --git a/packages/server/src/commands/worker.ts b/packages/server/src/commands/worker.ts
index ce0a304396f..e993c73608e 100644
--- a/packages/server/src/commands/worker.ts
+++ b/packages/server/src/commands/worker.ts
@@ -7,6 +7,7 @@ import { NodesPool } from '../NodesPool'
 import { CachePool } from '../CachePool'
 import { QueueEvents, QueueEventsListener } from 'bullmq'
 import { AbortControllerPool } from '../AbortControllerPool'
+import { UsageCacheManager } from '../UsageCacheManager'
 interface CustomListener extends QueueEventsListener {
 abort: (args: { id: string }, id: string) => void
@@ -19,7 +20,7 @@ export default class Worker extends BaseCommand {
 async run(): Promise<void> {
 logger.info('Starting Flowise Worker...')
- const { appDataSource, telemetry, componentNodes, cachePool, abortControllerPool } = await this.prepareData()
+ const { appDataSource, telemetry, componentNodes, cachePool, abortControllerPool, usageCacheManager } = await this.prepareData()
 const queueManager = QueueManager.getInstance()
 queueManager.setupAllQueues({
@@ -27,7 +28,8 @@ export default class Worker extends BaseCommand {
 telemetry,
 cachePool,
 appDataSource,
- abortControllerPool
+ abortControllerPool,
+ usageCacheManager
 })
 /** Prediction */
@@ -72,7 +74,10 @@ export default class Worker extends BaseCommand {
 // Initialize cache pool
 const cachePool = new CachePool()
- return { appDataSource, telemetry, componentNodes: nodesPool.componentNodes, cachePool, abortControllerPool }
+ // Initialize usage cache manager
+ const usageCacheManager = await UsageCacheManager.getInstance()
+
+ return { appDataSource, telemetry, componentNodes: nodesPool.componentNodes, cachePool, abortControllerPool, usageCacheManager }
 }
 async catch(error: Error) {
diff --git a/packages/server/src/controllers/apikey/index.ts b/packages/server/src/controllers/apikey/index.ts
index 40452b71999..029af1e5e5c 100644
--- a/packages/server/src/controllers/apikey/index.ts
+++ b/packages/server/src/controllers/apikey/index.ts
@@ -6,7 +6,8 @@ import apikeyService from '../../services/apikey'
 // Get api keys
 const getAllApiKeys = async (req: Request, res: Response, next: NextFunction) => {
 try {
- const apiResponse = await apikeyService.getAllApiKeys()
+ const autoCreateNewKey = true
+ const apiResponse = await apikeyService.getAllApiKeys(req.user?.activeWorkspaceId, autoCreateNewKey)
 return res.json(apiResponse)
 } catch (error) {
 next(error)
@@ -18,7 +19,7 @@ const createApiKey = async (req: Request, res: Response, next: NextFunction) =>
 if (typeof req.body === 'undefined' || !req.body.keyName) {
 throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: apikeyController.createApiKey - keyName not provided!`)
 }
-
const apiResponse = await apikeyService.createApiKey(req.body.keyName) + const apiResponse = await apikeyService.createApiKey(req.body.keyName, req.user?.activeWorkspaceId) return res.json(apiResponse) } catch (error) { next(error) @@ -34,7 +35,7 @@ const updateApiKey = async (req: Request, res: Response, next: NextFunction) => if (typeof req.body === 'undefined' || !req.body.keyName) { throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: apikeyController.updateApiKey - keyName not provided!`) } - const apiResponse = await apikeyService.updateApiKey(req.params.id, req.body.keyName) + const apiResponse = await apikeyService.updateApiKey(req.params.id, req.body.keyName, req.user?.activeWorkspaceId) return res.json(apiResponse) } catch (error) { next(error) @@ -47,6 +48,7 @@ const importKeys = async (req: Request, res: Response, next: NextFunction) => { if (typeof req.body === 'undefined' || !req.body.jsonFile) { throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: apikeyController.importKeys - body not provided!`) } + req.body.workspaceId = req.user?.activeWorkspaceId const apiResponse = await apikeyService.importKeys(req.body) return res.json(apiResponse) } catch (error) { @@ -60,7 +62,7 @@ const deleteApiKey = async (req: Request, res: Response, next: NextFunction) => if (typeof req.params === 'undefined' || !req.params.id) { throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: apikeyController.deleteApiKey - id not provided!`) } - const apiResponse = await apikeyService.deleteApiKey(req.params.id) + const apiResponse = await apikeyService.deleteApiKey(req.params.id, req.user?.activeWorkspaceId) return res.json(apiResponse) } catch (error) { next(error) diff --git a/packages/server/src/controllers/assistants/index.ts b/packages/server/src/controllers/assistants/index.ts index 2180a4c7f12..324907d0b2c 100644 --- a/packages/server/src/controllers/assistants/index.ts +++ b/packages/server/src/controllers/assistants/index.ts @@ -1,8 +1,10 @@ -import { Request, Response, NextFunction } from 'express' -import assistantsService from '../../services/assistants' -import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { NextFunction, Request, Response } from 'express' import { StatusCodes } from 'http-status-codes' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' import { AssistantType } from '../../Interface' +import assistantsService from '../../services/assistants' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { checkUsageLimit } from '../../utils/quotaUsage' const createAssistant = async (req: Request, res: Response, next: NextFunction) => { try { @@ -12,7 +14,30 @@ const createAssistant = async (req: Request, res: Response, next: NextFunction) `Error: assistantsController.createAssistant - body not provided!` ) } - const apiResponse = await assistantsService.createAssistant(req.body) + const body = req.body + const orgId = req.user?.activeOrganizationId + if (!orgId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: assistantsController.createAssistant - organization ${orgId} not found!` + ) + } + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: assistantsController.createAssistant - workspace ${workspaceId} not found!` + ) + } + const subscriptionId = req.user?.activeOrganizationSubscriptionId || '' + + const existingAssistantCount = await 
assistantsService.getAssistantsCountByOrganization(body.type, orgId) + const newAssistantCount = 1 + await checkUsageLimit('flows', subscriptionId, getRunningExpressApp().usageCacheManager, existingAssistantCount + newAssistantCount) + + body.workspaceId = workspaceId + const apiResponse = await assistantsService.createAssistant(body, orgId) + return res.json(apiResponse) } catch (error) { next(error) @@ -37,7 +62,7 @@ const deleteAssistant = async (req: Request, res: Response, next: NextFunction) const getAllAssistants = async (req: Request, res: Response, next: NextFunction) => { try { const type = req.query.type as AssistantType - const apiResponse = await assistantsService.getAllAssistants(type) + const apiResponse = await assistantsService.getAllAssistants(type, req.user?.activeWorkspaceId) return res.json(apiResponse) } catch (error) { next(error) @@ -91,7 +116,7 @@ const getChatModels = async (req: Request, res: Response, next: NextFunction) => const getDocumentStores = async (req: Request, res: Response, next: NextFunction) => { try { - const apiResponse = await assistantsService.getDocumentStores() + const apiResponse = await assistantsService.getDocumentStores(req.user?.activeWorkspaceId) return res.json(apiResponse) } catch (error) { next(error) diff --git a/packages/server/src/controllers/chat-messages/index.ts b/packages/server/src/controllers/chat-messages/index.ts index d5c339f8357..f5d3dd04488 100644 --- a/packages/server/src/controllers/chat-messages/index.ts +++ b/packages/server/src/controllers/chat-messages/index.ts @@ -62,6 +62,7 @@ const getAllChatMessages = async (req: Request, res: Response, next: NextFunctio chatTypes = [_chatTypes as ChatType] } } + const activeWorkspaceId = req.user?.activeWorkspaceId const sortOrder = req.query?.order as string | undefined const chatId = req.query?.chatId as string | undefined const memoryType = req.query?.memoryType as string | undefined @@ -91,9 +92,9 @@ const getAllChatMessages = async (req: Request, res: Response, next: NextFunctio endDate, messageId, feedback, - feedbackTypeFilters + feedbackTypeFilters, + activeWorkspaceId ) - return res.json(parseAPIResponse(apiResponse)) } catch (error) { next(error) @@ -102,6 +103,7 @@ const getAllChatMessages = async (req: Request, res: Response, next: NextFunctio const getAllInternalChatMessages = async (req: Request, res: Response, next: NextFunction) => { try { + const activeWorkspaceId = req.user?.activeWorkspaceId const sortOrder = req.query?.order as string | undefined const chatId = req.query?.chatId as string | undefined const memoryType = req.query?.memoryType as string | undefined @@ -125,7 +127,8 @@ const getAllInternalChatMessages = async (req: Request, res: Response, next: Nex endDate, messageId, feedback, - feedbackTypeFilters + feedbackTypeFilters, + activeWorkspaceId ) return res.json(parseAPIResponse(apiResponse)) } catch (error) { @@ -142,6 +145,20 @@ const removeAllChatMessages = async (req: Request, res: Response, next: NextFunc 'Error: chatMessagesController.removeAllChatMessages - id not provided!' 
) } + const orgId = req.user?.activeOrganizationId + if (!orgId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: chatMessagesController.removeAllChatMessages - organization ${orgId} not found!` + ) + } + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: chatMessagesController.removeAllChatMessages - workspace ${workspaceId} not found!` + ) + } const chatflowid = req.params.id const chatflow = await chatflowsService.getChatflowById(req.params.id) if (!chatflow) { @@ -177,6 +194,7 @@ const removeAllChatMessages = async (req: Request, res: Response, next: NextFunc if (!chatId) { const isFeedback = feedbackTypeFilters?.length ? true : false const hardDelete = req.query?.hardDelete as boolean | undefined + const messages = await utilGetChatMessage({ chatflowid, chatTypes, @@ -216,6 +234,7 @@ const removeAllChatMessages = async (req: Request, res: Response, next: NextFunc appServer.nodesPool.componentNodes, chatId, appServer.AppDataSource, + orgId, sessionId, memoryType, isClearFromViewMessageDialog @@ -226,7 +245,14 @@ const removeAllChatMessages = async (req: Request, res: Response, next: NextFunc } } - const apiResponse = await chatMessagesService.removeChatMessagesByMessageIds(chatflowid, chatIdMap, messageIds) + const apiResponse = await chatMessagesService.removeChatMessagesByMessageIds( + chatflowid, + chatIdMap, + messageIds, + orgId, + workspaceId, + appServer.usageCacheManager + ) return res.json(apiResponse) } else { try { @@ -235,6 +261,7 @@ const removeAllChatMessages = async (req: Request, res: Response, next: NextFunc appServer.nodesPool.componentNodes, chatId, appServer.AppDataSource, + orgId, sessionId, memoryType, isClearFromViewMessageDialog @@ -255,7 +282,14 @@ const removeAllChatMessages = async (req: Request, res: Response, next: NextFunc const toDate = new Date(endDate) deleteOptions.createdDate = Between(fromDate ?? aMonthAgo(), toDate ?? 
new Date()) } - const apiResponse = await chatMessagesService.removeAllChatMessages(chatId, chatflowid, deleteOptions) + const apiResponse = await chatMessagesService.removeAllChatMessages( + chatId, + chatflowid, + deleteOptions, + orgId, + workspaceId, + appServer.usageCacheManager + ) return res.json(apiResponse) } } catch (error) { @@ -282,26 +316,30 @@ const parseAPIResponse = (apiResponse: ChatMessage | ChatMessage[]): ChatMessage const parseResponse = (response: ChatMessage): ChatMessage => { const parsedResponse = { ...response } - if (parsedResponse.sourceDocuments) { - parsedResponse.sourceDocuments = JSON.parse(parsedResponse.sourceDocuments) - } - if (parsedResponse.usedTools) { - parsedResponse.usedTools = JSON.parse(parsedResponse.usedTools) - } - if (parsedResponse.fileAnnotations) { - parsedResponse.fileAnnotations = JSON.parse(parsedResponse.fileAnnotations) - } - if (parsedResponse.agentReasoning) { - parsedResponse.agentReasoning = JSON.parse(parsedResponse.agentReasoning) - } - if (parsedResponse.fileUploads) { - parsedResponse.fileUploads = JSON.parse(parsedResponse.fileUploads) - } - if (parsedResponse.action) { - parsedResponse.action = JSON.parse(parsedResponse.action) - } - if (parsedResponse.artifacts) { - parsedResponse.artifacts = JSON.parse(parsedResponse.artifacts) + try { + if (parsedResponse.sourceDocuments) { + parsedResponse.sourceDocuments = JSON.parse(parsedResponse.sourceDocuments) + } + if (parsedResponse.usedTools) { + parsedResponse.usedTools = JSON.parse(parsedResponse.usedTools) + } + if (parsedResponse.fileAnnotations) { + parsedResponse.fileAnnotations = JSON.parse(parsedResponse.fileAnnotations) + } + if (parsedResponse.agentReasoning) { + parsedResponse.agentReasoning = JSON.parse(parsedResponse.agentReasoning) + } + if (parsedResponse.fileUploads) { + parsedResponse.fileUploads = JSON.parse(parsedResponse.fileUploads) + } + if (parsedResponse.action) { + parsedResponse.action = JSON.parse(parsedResponse.action) + } + if (parsedResponse.artifacts) { + parsedResponse.artifacts = JSON.parse(parsedResponse.artifacts) + } + } catch (e) { + console.error('Error parsing chat message response', e) } return parsedResponse diff --git a/packages/server/src/controllers/chatflows/index.ts b/packages/server/src/controllers/chatflows/index.ts index 930272cc5e2..7d5fe04df9b 100644 --- a/packages/server/src/controllers/chatflows/index.ts +++ b/packages/server/src/controllers/chatflows/index.ts @@ -1,18 +1,20 @@ import { NextFunction, Request, Response } from 'express' import { StatusCodes } from 'http-status-codes' -import apiKeyService from '../../services/apikey' import { ChatFlow } from '../../database/entities/ChatFlow' -import { RateLimiterManager } from '../../utils/rateLimit' import { InternalFlowiseError } from '../../errors/internalFlowiseError' import { ChatflowType } from '../../Interface' +import apiKeyService from '../../services/apikey' import chatflowsService from '../../services/chatflows' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { checkUsageLimit } from '../../utils/quotaUsage' +import { RateLimiterManager } from '../../utils/rateLimit' const checkIfChatflowIsValidForStreaming = async (req: Request, res: Response, next: NextFunction) => { try { if (typeof req.params === 'undefined' || !req.params.id) { throw new InternalFlowiseError( StatusCodes.PRECONDITION_FAILED, - `Error: chatflowsRouter.checkIfChatflowIsValidForStreaming - id not provided!` + `Error: 
chatflowsController.checkIfChatflowIsValidForStreaming - id not provided!` ) } const apiResponse = await chatflowsService.checkIfChatflowIsValidForStreaming(req.params.id) @@ -27,7 +29,7 @@ const checkIfChatflowIsValidForUploads = async (req: Request, res: Response, nex if (typeof req.params === 'undefined' || !req.params.id) { throw new InternalFlowiseError( StatusCodes.PRECONDITION_FAILED, - `Error: chatflowsRouter.checkIfChatflowIsValidForUploads - id not provided!` + `Error: chatflowsController.checkIfChatflowIsValidForUploads - id not provided!` ) } const apiResponse = await chatflowsService.checkIfChatflowIsValidForUploads(req.params.id) @@ -40,9 +42,23 @@ const checkIfChatflowIsValidForUploads = async (req: Request, res: Response, nex const deleteChatflow = async (req: Request, res: Response, next: NextFunction) => { try { if (typeof req.params === 'undefined' || !req.params.id) { - throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: chatflowsRouter.deleteChatflow - id not provided!`) + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: chatflowsController.deleteChatflow - id not provided!`) + } + const orgId = req.user?.activeOrganizationId + if (!orgId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: chatflowsController.deleteChatflow - organization ${orgId} not found!` + ) + } + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: chatflowsController.deleteChatflow - workspace ${workspaceId} not found!` + ) } - const apiResponse = await chatflowsService.deleteChatflow(req.params.id) + const apiResponse = await chatflowsService.deleteChatflow(req.params.id, orgId, workspaceId) return res.json(apiResponse) } catch (error) { next(error) @@ -51,7 +67,7 @@ const deleteChatflow = async (req: Request, res: Response, next: NextFunction) = const getAllChatflows = async (req: Request, res: Response, next: NextFunction) => { try { - const apiResponse = await chatflowsService.getAllChatflows(req.query?.type as ChatflowType) + const apiResponse = await chatflowsService.getAllChatflows(req.query?.type as ChatflowType, req.user?.activeWorkspaceId) return res.json(apiResponse) } catch (error) { next(error) @@ -64,7 +80,7 @@ const getChatflowByApiKey = async (req: Request, res: Response, next: NextFuncti if (typeof req.params === 'undefined' || !req.params.apikey) { throw new InternalFlowiseError( StatusCodes.PRECONDITION_FAILED, - `Error: chatflowsRouter.getChatflowByApiKey - apikey not provided!` + `Error: chatflowsController.getChatflowByApiKey - apikey not provided!` ) } const apikey = await apiKeyService.getApiKey(req.params.apikey) @@ -81,7 +97,7 @@ const getChatflowByApiKey = async (req: Request, res: Response, next: NextFuncti const getChatflowById = async (req: Request, res: Response, next: NextFunction) => { try { if (typeof req.params === 'undefined' || !req.params.id) { - throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: chatflowsRouter.getChatflowById - id not provided!`) + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: chatflowsController.getChatflowById - id not provided!`) } const apiResponse = await chatflowsService.getChatflowById(req.params.id) return res.json(apiResponse) @@ -93,12 +109,40 @@ const getChatflowById = async (req: Request, res: Response, next: NextFunction) const saveChatflow = async (req: Request, res: Response, next: NextFunction) => { try { if (!req.body) { - throw 
new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: chatflowsRouter.saveChatflow - body not provided!`)
+ throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: chatflowsController.saveChatflow - body not provided!`)
+ }
+ const orgId = req.user?.activeOrganizationId
+ if (!orgId) {
+ throw new InternalFlowiseError(
+ StatusCodes.NOT_FOUND,
+ `Error: chatflowsController.saveChatflow - organization ${orgId} not found!`
+ )
+ }
+ const workspaceId = req.user?.activeWorkspaceId
+ if (!workspaceId) {
+ throw new InternalFlowiseError(
+ StatusCodes.NOT_FOUND,
+ `Error: chatflowsController.saveChatflow - workspace ${workspaceId} not found!`
+ )
 }
+ const subscriptionId = req.user?.activeOrganizationSubscriptionId || ''
 const body = req.body
+
+ const existingChatflowCount = await chatflowsService.getAllChatflowsCountByOrganization(body.type, orgId)
+ const newChatflowCount = 1
+ await checkUsageLimit('flows', subscriptionId, getRunningExpressApp().usageCacheManager, existingChatflowCount + newChatflowCount)
+
 const newChatFlow = new ChatFlow()
 Object.assign(newChatFlow, body)
- const apiResponse = await chatflowsService.saveChatflow(newChatFlow)
+ newChatFlow.workspaceId = workspaceId
+ const apiResponse = await chatflowsService.saveChatflow(
+ newChatFlow,
+ orgId,
+ workspaceId,
+ subscriptionId,
+ getRunningExpressApp().usageCacheManager
+ )
+
 return res.json(apiResponse)
 } catch (error) {
 next(error)
@@ -108,7 +152,23 @@ const importChatflows = async (req: Request, res: Response, next: NextFunction) => {
 try {
 const chatflows: Partial<ChatFlow>[] = req.body.Chatflows
- const apiResponse = await chatflowsService.importChatflows(chatflows)
+ const orgId = req.user?.activeOrganizationId
+ if (!orgId) {
+ throw new InternalFlowiseError(
+ StatusCodes.NOT_FOUND,
+ `Error: chatflowsController.saveChatflow - organization ${orgId} not found!`
+ )
+ }
+ const workspaceId = req.user?.activeWorkspaceId
+ if (!workspaceId) {
+ throw new InternalFlowiseError(
+ StatusCodes.NOT_FOUND,
+ `Error: chatflowsController.saveChatflow - workspace ${workspaceId} not found!`
+ )
+ }
+ const subscriptionId = req.user?.activeOrganizationSubscriptionId || ''
+ req.body.workspaceId = req.user?.activeWorkspaceId
+ const apiResponse = await chatflowsService.importChatflows(chatflows, orgId, workspaceId, subscriptionId)
 return res.json(apiResponse)
 } catch (error) {
 next(error)
@@ -118,13 +178,27 @@ const updateChatflow = async (req: Request, res: Response, next: NextFunction) => {
 try {
 if (typeof req.params === 'undefined' || !req.params.id) {
- throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: chatflowsRouter.updateChatflow - id not provided!`)
+ throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: chatflowsController.updateChatflow - id not provided!`)
 }
 const chatflow = await chatflowsService.getChatflowById(req.params.id)
 if (!chatflow) {
 return res.status(404).send(`Chatflow ${req.params.id} not found`)
 }
-
+ const orgId = req.user?.activeOrganizationId
+ if (!orgId) {
+ throw new InternalFlowiseError(
+ StatusCodes.NOT_FOUND,
+ `Error: chatflowsController.saveChatflow - organization ${orgId} not found!`
+ )
+ }
+ const workspaceId = req.user?.activeWorkspaceId
+ if (!workspaceId) {
+ throw new InternalFlowiseError(
+ StatusCodes.NOT_FOUND,
+ `Error: chatflowsController.saveChatflow - workspace 
${workspaceId} not found!` + ) + } + const subscriptionId = req.user?.activeOrganizationSubscriptionId || '' const body = req.body const updateChatFlow = new ChatFlow() Object.assign(updateChatFlow, body) @@ -133,7 +207,7 @@ const updateChatflow = async (req: Request, res: Response, next: NextFunction) = const rateLimiterManager = RateLimiterManager.getInstance() await rateLimiterManager.updateRateLimiter(updateChatFlow) - const apiResponse = await chatflowsService.updateChatflow(chatflow, updateChatFlow) + const apiResponse = await chatflowsService.updateChatflow(chatflow, updateChatFlow, orgId, workspaceId, subscriptionId) return res.json(apiResponse) } catch (error) { next(error) @@ -145,7 +219,7 @@ const getSinglePublicChatflow = async (req: Request, res: Response, next: NextFu if (typeof req.params === 'undefined' || !req.params.id) { throw new InternalFlowiseError( StatusCodes.PRECONDITION_FAILED, - `Error: chatflowsRouter.getSinglePublicChatflow - id not provided!` + `Error: chatflowsController.getSinglePublicChatflow - id not provided!` ) } const apiResponse = await chatflowsService.getSinglePublicChatflow(req.params.id) @@ -160,7 +234,7 @@ const getSinglePublicChatbotConfig = async (req: Request, res: Response, next: N if (typeof req.params === 'undefined' || !req.params.id) { throw new InternalFlowiseError( StatusCodes.PRECONDITION_FAILED, - `Error: chatflowsRouter.getSinglePublicChatbotConfig - id not provided!` + `Error: chatflowsController.getSinglePublicChatbotConfig - id not provided!` ) } const apiResponse = await chatflowsService.getSinglePublicChatbotConfig(req.params.id) @@ -170,6 +244,27 @@ const getSinglePublicChatbotConfig = async (req: Request, res: Response, next: N } } +const checkIfChatflowHasChanged = async (req: Request, res: Response, next: NextFunction) => { + try { + if (typeof req.params === 'undefined' || !req.params.id) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: chatflowsController.checkIfChatflowHasChanged - id not provided!` + ) + } + if (!req.params.lastUpdatedDateTime) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: chatflowsController.checkIfChatflowHasChanged - lastUpdatedDateTime not provided!` + ) + } + const apiResponse = await chatflowsService.checkIfChatflowHasChanged(req.params.id, req.params.lastUpdatedDateTime) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + export default { checkIfChatflowIsValidForStreaming, checkIfChatflowIsValidForUploads, @@ -181,5 +276,6 @@ export default { importChatflows, updateChatflow, getSinglePublicChatflow, - getSinglePublicChatbotConfig + getSinglePublicChatbotConfig, + checkIfChatflowHasChanged } diff --git a/packages/server/src/controllers/credentials/index.ts b/packages/server/src/controllers/credentials/index.ts index ad937443045..78cb4348c5e 100644 --- a/packages/server/src/controllers/credentials/index.ts +++ b/packages/server/src/controllers/credentials/index.ts @@ -11,7 +11,9 @@ const createCredential = async (req: Request, res: Response, next: NextFunction) `Error: credentialsController.createCredential - body not provided!` ) } - const apiResponse = await credentialsService.createCredential(req.body) + const body = req.body + body.workspaceId = req.user?.activeWorkspaceId + const apiResponse = await credentialsService.createCredential(body) return res.json(apiResponse) } catch (error) { next(error) @@ -35,7 +37,7 @@ const deleteCredentials = async (req: Request, res: Response, next: NextFunction const 
getAllCredentials = async (req: Request, res: Response, next: NextFunction) => { try { - const apiResponse = await credentialsService.getAllCredentials(req.query.credentialName) + const apiResponse = await credentialsService.getAllCredentials(req.query.credentialName, req.user?.activeWorkspaceId) return res.json(apiResponse) } catch (error) { next(error) @@ -50,7 +52,7 @@ const getCredentialById = async (req: Request, res: Response, next: NextFunction `Error: credentialsController.getCredentialById - id not provided!` ) } - const apiResponse = await credentialsService.getCredentialById(req.params.id) + const apiResponse = await credentialsService.getCredentialById(req.params.id, req.user?.activeWorkspaceId) return res.json(apiResponse) } catch (error) { next(error) diff --git a/packages/server/src/controllers/dataset/index.ts b/packages/server/src/controllers/dataset/index.ts new file mode 100644 index 00000000000..f4d08de970b --- /dev/null +++ b/packages/server/src/controllers/dataset/index.ts @@ -0,0 +1,143 @@ +import { Request, Response, NextFunction } from 'express' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import datasetService from '../../services/dataset' +import { StatusCodes } from 'http-status-codes' + +const getAllDatasets = async (req: Request, res: Response, next: NextFunction) => { + try { + const apiResponse = await datasetService.getAllDatasets(req.user?.activeWorkspaceId) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +const getDataset = async (req: Request, res: Response, next: NextFunction) => { + try { + if (typeof req.params === 'undefined' || !req.params.id) { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: datasetService.getDataset - id not provided!`) + } + const apiResponse = await datasetService.getDataset(req.params.id) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +const createDataset = async (req: Request, res: Response, next: NextFunction) => { + try { + if (!req.body) { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: datasetService.createDataset - body not provided!`) + } + const body = req.body + body.workspaceId = req.user?.activeWorkspaceId + const apiResponse = await datasetService.createDataset(body) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +const updateDataset = async (req: Request, res: Response, next: NextFunction) => { + try { + if (!req.body) { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: datasetService.updateDataset - body not provided!`) + } + if (typeof req.params === 'undefined' || !req.params.id) { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: datasetService.updateDataset - id not provided!`) + } + const apiResponse = await datasetService.updateDataset(req.params.id, req.body) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +const deleteDataset = async (req: Request, res: Response, next: NextFunction) => { + try { + if (typeof req.params === 'undefined' || !req.params.id) { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: datasetService.deleteDataset - id not provided!`) + } + const apiResponse = await datasetService.deleteDataset(req.params.id) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +const addDatasetRow = async (req: Request, res: Response, next: NextFunction) => { + try { + if (!req.body) { + throw new 
InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: datasetService.addDatasetRow - body not provided!`) + } + if (!req.body.datasetId) { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: datasetService.addDatasetRow - datasetId not provided!`) + } + const apiResponse = await datasetService.addDatasetRow(req.body) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +const updateDatasetRow = async (req: Request, res: Response, next: NextFunction) => { + try { + if (!req.body) { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: datasetService.updateDatasetRow - body not provided!`) + } + if (typeof req.params === 'undefined' || !req.params.id) { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: datasetService.updateDatasetRow - id not provided!`) + } + const apiResponse = await datasetService.updateDatasetRow(req.params.id, req.body) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +const deleteDatasetRow = async (req: Request, res: Response, next: NextFunction) => { + try { + if (typeof req.params === 'undefined' || !req.params.id) { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: datasetService.deleteDatasetRow - id not provided!`) + } + const apiResponse = await datasetService.deleteDatasetRow(req.params.id) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +const patchDeleteRows = async (req: Request, res: Response, next: NextFunction) => { + try { + const ids = req.body.ids ?? [] + const apiResponse = await datasetService.patchDeleteRows(ids) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +const reorderDatasetRow = async (req: Request, res: Response, next: NextFunction) => { + try { + if (!req.body) { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: datasetService.reorderDatasetRow - body not provided!`) + } + + const apiResponse = await datasetService.reorderDatasetRow(req.body.datasetId, req.body.rows) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} +export default { + getAllDatasets, + getDataset, + createDataset, + updateDataset, + deleteDataset, + addDatasetRow, + updateDatasetRow, + deleteDatasetRow, + patchDeleteRows, + reorderDatasetRow +} diff --git a/packages/server/src/controllers/documentstore/index.ts b/packages/server/src/controllers/documentstore/index.ts index 36b1402e1d8..04f1f339a42 100644 --- a/packages/server/src/controllers/documentstore/index.ts +++ b/packages/server/src/controllers/documentstore/index.ts @@ -15,9 +15,20 @@ const createDocumentStore = async (req: Request, res: Response, next: NextFuncti `Error: documentStoreController.createDocumentStore - body not provided!` ) } + + const orgId = req.user?.activeOrganizationId + if (!orgId) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: documentStoreController.createDocumentStore - organizationId not provided!` + ) + } + const body = req.body + body.workspaceId = req.user?.activeWorkspaceId + const docStore = DocumentStoreDTO.toEntity(body) - const apiResponse = await documentStoreService.createDocumentStore(docStore) + const apiResponse = await documentStoreService.createDocumentStore(docStore, orgId) return res.json(apiResponse) } catch (error) { next(error) @@ -26,7 +37,7 @@ const createDocumentStore = async (req: Request, res: Response, next: NextFuncti const getAllDocumentStores = async (req: Request, res: Response, next: 
NextFunction) => { try { - const apiResponse = await documentStoreService.getAllDocumentStores() + const apiResponse = await documentStoreService.getAllDocumentStores(req.user?.activeWorkspaceId) return res.json(DocumentStoreDTO.fromEntities(apiResponse)) } catch (error) { next(error) @@ -44,7 +55,29 @@ const deleteLoaderFromDocumentStore = async (req: Request, res: Response, next: `Error: documentStoreController.deleteLoaderFromDocumentStore - missing storeId or loaderId.` ) } - const apiResponse = await documentStoreService.deleteLoaderFromDocumentStore(storeId, loaderId) + + const orgId = req.user?.activeOrganizationId + if (!orgId) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: documentStoreController.createDocumentStore - organizationId not provided!` + ) + } + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: documentStoreController.createDocumentStore - workspaceId not provided!` + ) + } + + const apiResponse = await documentStoreService.deleteLoaderFromDocumentStore( + storeId, + loaderId, + orgId, + workspaceId, + getRunningExpressApp().usageCacheManager + ) return res.json(DocumentStoreDTO.fromEntity(apiResponse)) } catch (error) { next(error) @@ -199,10 +232,33 @@ const processLoader = async (req: Request, res: Response, next: NextFunction) => `Error: documentStoreController.processLoader - body not provided!` ) } + const orgId = req.user?.activeOrganizationId + if (!orgId) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: documentStoreController.createDocumentStore - organizationId not provided!` + ) + } + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: documentStoreController.createDocumentStore - workspaceId not provided!` + ) + } + const subscriptionId = req.user?.activeOrganizationSubscriptionId || '' const docLoaderId = req.params.loaderId const body = req.body const isInternalRequest = req.headers['x-request-from'] === 'internal' - const apiResponse = await documentStoreService.processLoaderMiddleware(body, docLoaderId, isInternalRequest) + const apiResponse = await documentStoreService.processLoaderMiddleware( + body, + docLoaderId, + orgId, + workspaceId, + subscriptionId, + getRunningExpressApp().usageCacheManager, + isInternalRequest + ) return res.json(apiResponse) } catch (error) { next(error) @@ -248,7 +304,26 @@ const deleteDocumentStore = async (req: Request, res: Response, next: NextFuncti `Error: documentStoreController.deleteDocumentStore - storeId not provided!` ) } - const apiResponse = await documentStoreService.deleteDocumentStore(req.params.id) + const orgId = req.user?.activeOrganizationId + if (!orgId) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: documentStoreController.createDocumentStore - organizationId not provided!` + ) + } + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: documentStoreController.createDocumentStore - workspaceId not provided!` + ) + } + const apiResponse = await documentStoreService.deleteDocumentStore( + req.params.id, + orgId, + workspaceId, + getRunningExpressApp().usageCacheManager + ) return res.json(apiResponse) } catch (error) { next(error) @@ -263,9 +338,30 @@ const previewFileChunks = async (req: Request, res: Response, next: 
NextFunction `Error: documentStoreController.previewFileChunks - body not provided!` ) } + const orgId = req.user?.activeOrganizationId + if (!orgId) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: documentStoreController.createDocumentStore - organizationId not provided!` + ) + } + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: documentStoreController.createDocumentStore - workspaceId not provided!` + ) + } + const subscriptionId = req.user?.activeOrganizationSubscriptionId || '' const body = req.body body.preview = true - const apiResponse = await documentStoreService.previewChunksMiddleware(body) + const apiResponse = await documentStoreService.previewChunksMiddleware( + body, + orgId, + workspaceId, + subscriptionId, + getRunningExpressApp().usageCacheManager + ) return res.json(apiResponse) } catch (error) { next(error) @@ -286,8 +382,30 @@ const insertIntoVectorStore = async (req: Request, res: Response, next: NextFunc if (typeof req.body === 'undefined') { throw new Error('Error: documentStoreController.insertIntoVectorStore - body not provided!') } + const orgId = req.user?.activeOrganizationId + if (!orgId) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: documentStoreController.createDocumentStore - organizationId not provided!` + ) + } + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: documentStoreController.createDocumentStore - workspaceId not provided!` + ) + } + const subscriptionId = req.user?.activeOrganizationSubscriptionId || '' const body = req.body - const apiResponse = await documentStoreService.insertIntoVectorStoreMiddleware(body) + const apiResponse = await documentStoreService.insertIntoVectorStoreMiddleware( + body, + false, + orgId, + workspaceId, + subscriptionId, + getRunningExpressApp().usageCacheManager + ) getRunningExpressApp().metricsProvider?.incrementCounter(FLOWISE_METRIC_COUNTERS.VECTORSTORE_UPSERT, { status: FLOWISE_COUNTER_STATUS.SUCCESS }) @@ -393,9 +511,32 @@ const upsertDocStoreMiddleware = async (req: Request, res: Response, next: NextF if (typeof req.body === 'undefined') { throw new Error('Error: documentStoreController.upsertDocStoreMiddleware - body not provided!') } + const orgId = req.user?.activeOrganizationId + if (!orgId) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: documentStoreController.createDocumentStore - organizationId not provided!` + ) + } + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: documentStoreController.createDocumentStore - workspaceId not provided!` + ) + } + const subscriptionId = req.user?.activeOrganizationSubscriptionId || '' const body = req.body const files = (req.files as Express.Multer.File[]) || [] - const apiResponse = await documentStoreService.upsertDocStoreMiddleware(req.params.id, body, files) + const apiResponse = await documentStoreService.upsertDocStoreMiddleware( + req.params.id, + body, + files, + orgId, + workspaceId, + subscriptionId, + getRunningExpressApp().usageCacheManager + ) getRunningExpressApp().metricsProvider?.incrementCounter(FLOWISE_METRIC_COUNTERS.VECTORSTORE_UPSERT, { status: FLOWISE_COUNTER_STATUS.SUCCESS }) @@ -416,8 +557,30 @@ const refreshDocStoreMiddleware = async (req: Request, 
res: Response, next: Next `Error: documentStoreController.refreshDocStoreMiddleware - storeId not provided!` ) } + const orgId = req.user?.activeOrganizationId + if (!orgId) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: documentStoreController.createDocumentStore - organizationId not provided!` + ) + } + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: documentStoreController.createDocumentStore - workspaceId not provided!` + ) + } + const subscriptionId = req.user?.activeOrganizationSubscriptionId || '' const body = req.body - const apiResponse = await documentStoreService.refreshDocStoreMiddleware(req.params.id, body) + const apiResponse = await documentStoreService.refreshDocStoreMiddleware( + req.params.id, + body, + orgId, + workspaceId, + subscriptionId, + getRunningExpressApp().usageCacheManager + ) getRunningExpressApp().metricsProvider?.incrementCounter(FLOWISE_METRIC_COUNTERS.VECTORSTORE_UPSERT, { status: FLOWISE_COUNTER_STATUS.SUCCESS }) diff --git a/packages/server/src/controllers/evaluations/index.ts b/packages/server/src/controllers/evaluations/index.ts new file mode 100644 index 00000000000..9ae7cb000d2 --- /dev/null +++ b/packages/server/src/controllers/evaluations/index.ts @@ -0,0 +1,135 @@ +import { Request, Response, NextFunction } from 'express' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { StatusCodes } from 'http-status-codes' +import evaluationsService from '../../services/evaluations' + +const createEvaluation = async (req: Request, res: Response, next: NextFunction) => { + try { + if (!req.body) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: evaluationsService.createEvaluation - body not provided!` + ) + } + const orgId = req.user?.activeOrganizationId + if (!orgId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: evaluationsService.createEvaluation - organization ${orgId} not found!` + ) + } + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: evaluationsService.createEvaluation - workspace ${workspaceId} not found!` + ) + } + const body = req.body + body.workspaceId = workspaceId + + const httpProtocol = req.get('x-forwarded-proto') || req.get('X-Forwarded-Proto') || req.protocol + const baseURL = `${httpProtocol}://${req.get('host')}` + const apiResponse = await evaluationsService.createEvaluation(body, baseURL, orgId) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +const runAgain = async (req: Request, res: Response, next: NextFunction) => { + try { + if (typeof req.params === 'undefined' || !req.params.id) { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: evaluationsService.runAgain - id not provided!`) + } + const orgId = req.user?.activeOrganizationId + if (!orgId) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Error: evaluationsService.runAgain - organization ${orgId} not found!`) + } + const httpProtocol = req.get('x-forwarded-proto') || req.get('X-Forwarded-Proto') || req.protocol + const baseURL = `${httpProtocol}://${req.get('host')}` + const apiResponse = await evaluationsService.runAgain(req.params.id, baseURL, orgId) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +const getEvaluation = async (req: Request, res: Response, next: NextFunction) => { + 
try { + if (typeof req.params === 'undefined' || !req.params.id) { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: evaluationsService.getEvaluation - id not provided!`) + } + const apiResponse = await evaluationsService.getEvaluation(req.params.id) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +const deleteEvaluation = async (req: Request, res: Response, next: NextFunction) => { + try { + if (typeof req.params === 'undefined' || !req.params.id) { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: evaluationsService.deleteEvaluation - id not provided!`) + } + const apiResponse = await evaluationsService.deleteEvaluation(req.params.id, req.user?.activeWorkspaceId) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +const getAllEvaluations = async (req: Request, res: Response, next: NextFunction) => { + try { + const apiResponse = await evaluationsService.getAllEvaluations(req.user?.activeWorkspaceId) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +const isOutdated = async (req: Request, res: Response, next: NextFunction) => { + try { + if (typeof req.params === 'undefined' || !req.params.id) { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: evaluationsService.isOutdated - id not provided!`) + } + const apiResponse = await evaluationsService.isOutdated(req.params.id) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +const getVersions = async (req: Request, res: Response, next: NextFunction) => { + try { + if (typeof req.params === 'undefined' || !req.params.id) { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: evaluationsService.getVersions - id not provided!`) + } + const apiResponse = await evaluationsService.getVersions(req.params.id) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +const patchDeleteEvaluations = async (req: Request, res: Response, next: NextFunction) => { + try { + const ids = req.body.ids ?? [] + const isDeleteAllVersion = req.body.isDeleteAllVersion ?? 
false + const apiResponse = await evaluationsService.patchDeleteEvaluations(ids, isDeleteAllVersion, req.user?.activeWorkspaceId) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +export default { + createEvaluation, + getEvaluation, + deleteEvaluation, + getAllEvaluations, + isOutdated, + runAgain, + getVersions, + patchDeleteEvaluations +} diff --git a/packages/server/src/controllers/evaluators/index.ts b/packages/server/src/controllers/evaluators/index.ts new file mode 100644 index 00000000000..3f4e2913353 --- /dev/null +++ b/packages/server/src/controllers/evaluators/index.ts @@ -0,0 +1,74 @@ +import { Request, Response, NextFunction } from 'express' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { StatusCodes } from 'http-status-codes' +import evaluatorService from '../../services/evaluator' + +const getAllEvaluators = async (req: Request, res: Response, next: NextFunction) => { + try { + const apiResponse = await evaluatorService.getAllEvaluators(req.user?.activeWorkspaceId) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +const getEvaluator = async (req: Request, res: Response, next: NextFunction) => { + try { + if (typeof req.params === 'undefined' || !req.params.id) { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: evaluatorService.getEvaluator - id not provided!`) + } + const apiResponse = await evaluatorService.getEvaluator(req.params.id) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +const createEvaluator = async (req: Request, res: Response, next: NextFunction) => { + try { + if (!req.body) { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: evaluatorService.createEvaluator - body not provided!`) + } + const body = req.body + body.workspaceId = req.user?.activeWorkspaceId + const apiResponse = await evaluatorService.createEvaluator(body) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +const updateEvaluator = async (req: Request, res: Response, next: NextFunction) => { + try { + if (!req.body) { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: evaluatorService.updateEvaluator - body not provided!`) + } + if (typeof req.params === 'undefined' || !req.params.id) { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: evaluatorService.updateEvaluator - id not provided!`) + } + const apiResponse = await evaluatorService.updateEvaluator(req.params.id, req.body) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +const deleteEvaluator = async (req: Request, res: Response, next: NextFunction) => { + try { + if (typeof req.params === 'undefined' || !req.params.id) { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: evaluatorService.deleteEvaluator - id not provided!`) + } + const apiResponse = await evaluatorService.deleteEvaluator(req.params.id) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +export default { + getAllEvaluators, + getEvaluator, + createEvaluator, + updateEvaluator, + deleteEvaluator +} diff --git a/packages/server/src/controllers/executions/index.ts b/packages/server/src/controllers/executions/index.ts index 85ba3c729fc..7e3d80ae5da 100644 --- a/packages/server/src/controllers/executions/index.ts +++ b/packages/server/src/controllers/executions/index.ts @@ -5,7 +5,8 @@ import { ExecutionState } from '../../Interface' const getExecutionById = async (req: Request, 
res: Response, next: NextFunction) => { try { const executionId = req.params.id - const execution = await executionsService.getExecutionById(executionId) + const workspaceId = req.user?.activeWorkspaceId + const execution = await executionsService.getExecutionById(executionId, workspaceId) return res.json(execution) } catch (error) { next(error) @@ -25,7 +26,8 @@ const getPublicExecutionById = async (req: Request, res: Response, next: NextFun const updateExecution = async (req: Request, res: Response, next: NextFunction) => { try { const executionId = req.params.id - const execution = await executionsService.updateExecution(executionId, req.body) + const workspaceId = req.user?.activeWorkspaceId + const execution = await executionsService.updateExecution(executionId, req.body, workspaceId) return res.json(execution) } catch (error) { next(error) @@ -37,6 +39,9 @@ const getAllExecutions = async (req: Request, res: Response, next: NextFunction) // Extract all possible filters from query params const filters: any = {} + // Add workspace ID filter + filters.workspaceId = req.user?.activeWorkspaceId + // ID filter if (req.query.id) filters.id = req.query.id as string @@ -86,6 +91,7 @@ const getAllExecutions = async (req: Request, res: Response, next: NextFunction) const deleteExecutions = async (req: Request, res: Response, next: NextFunction) => { try { let executionIds: string[] = [] + const workspaceId = req.user?.activeWorkspaceId // Check if we're deleting a single execution from URL param if (req.params.id) { @@ -98,7 +104,7 @@ const deleteExecutions = async (req: Request, res: Response, next: NextFunction) return res.status(400).json({ success: false, message: 'No execution IDs provided' }) } - const result = await executionsService.deleteExecutions(executionIds) + const result = await executionsService.deleteExecutions(executionIds, workspaceId) return res.json(result) } catch (error) { next(error) diff --git a/packages/server/src/controllers/export-import/index.ts b/packages/server/src/controllers/export-import/index.ts index ba9d853796c..91c6d485bb7 100644 --- a/packages/server/src/controllers/export-import/index.ts +++ b/packages/server/src/controllers/export-import/index.ts @@ -1,9 +1,14 @@ import { NextFunction, Request, Response } from 'express' +import { StatusCodes } from 'http-status-codes' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' import exportImportService from '../../services/export-import' const exportData = async (req: Request, res: Response, next: NextFunction) => { try { - const apiResponse = await exportImportService.exportData(exportImportService.convertExportInput(req.body)) + const apiResponse = await exportImportService.exportData( + exportImportService.convertExportInput(req.body), + req.user?.activeWorkspaceId + ) return res.json(apiResponse) } catch (error) { next(error) @@ -12,8 +17,28 @@ const exportData = async (req: Request, res: Response, next: NextFunction) => { const importData = async (req: Request, res: Response, next: NextFunction) => { try { + const orgId = req.user?.activeOrganizationId + if (!orgId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: exportImportController.importData - organization ${orgId} not found!` + ) + } + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: exportImportController.importData - workspace ${workspaceId} not found!` + ) + } + const subscriptionId = 
req.user?.activeOrganizationSubscriptionId || '' + const importData = req.body - await exportImportService.importData(importData) + if (!importData) { + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, 'Error: exportImportController.importData - importData is required!') + } + + await exportImportService.importData(importData, orgId, workspaceId, subscriptionId) return res.json({ message: 'success' }) } catch (error) { next(error) diff --git a/packages/server/src/controllers/files/index.ts b/packages/server/src/controllers/files/index.ts new file mode 100644 index 00000000000..37f508a4023 --- /dev/null +++ b/packages/server/src/controllers/files/index.ts @@ -0,0 +1,59 @@ +import path from 'path' +import { NextFunction, Request, Response } from 'express' +import { getFilesListFromStorage, getStoragePath, removeSpecificFileFromStorage } from 'flowise-components' +import { updateStorageUsage } from '../../utils/quotaUsage' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { StatusCodes } from 'http-status-codes' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' + +const getAllFiles = async (req: Request, res: Response, next: NextFunction) => { + try { + const activeOrganizationId = req.user?.activeOrganizationId + if (!activeOrganizationId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: filesController.getAllFiles - organization ${activeOrganizationId} not found!` + ) + } + const apiResponse = await getFilesListFromStorage(activeOrganizationId) + const filesList = apiResponse.map((file: any) => ({ + ...file, + // replace org id because we don't want to expose it + path: file.path.replace(getStoragePath(), '').replace(`${path.sep}${activeOrganizationId}${path.sep}`, '') + })) + return res.json(filesList) + } catch (error) { + next(error) + } +} + +const deleteFile = async (req: Request, res: Response, next: NextFunction) => { + try { + const activeOrganizationId = req.user?.activeOrganizationId + if (!activeOrganizationId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: filesController.deleteFile - organization ${activeOrganizationId} not found!` + ) + } + const activeWorkspaceId = req.user?.activeWorkspaceId + if (!activeWorkspaceId) { + throw new InternalFlowiseError( + StatusCodes.NOT_FOUND, + `Error: filesController.deleteFile - workspace ${activeWorkspaceId} not found!` + ) + } + const filePath = req.query.path as string + const paths = filePath.split(path.sep).filter((path) => path !== '') + const { totalSize } = await removeSpecificFileFromStorage(activeOrganizationId, ...paths) + await updateStorageUsage(activeOrganizationId, activeWorkspaceId, totalSize, getRunningExpressApp().usageCacheManager) + return res.json({ message: 'file_deleted' }) + } catch (error) { + next(error) + } +} + +export default { + getAllFiles, + deleteFile +} diff --git a/packages/server/src/controllers/get-upload-file/index.ts b/packages/server/src/controllers/get-upload-file/index.ts index a33b73e0a8f..7d2a4b0aa63 100644 --- a/packages/server/src/controllers/get-upload-file/index.ts +++ b/packages/server/src/controllers/get-upload-file/index.ts @@ -4,6 +4,9 @@ import contentDisposition from 'content-disposition' import { streamStorageFile } from 'flowise-components' import { StatusCodes } from 'http-status-codes' import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { ChatFlow } from 
'../../database/entities/ChatFlow' +import { Workspace } from '../../enterprise/database/entities/workspace.entity' const streamUploadedFile = async (req: Request, res: Response, next: NextFunction) => { try { @@ -13,8 +16,27 @@ const streamUploadedFile = async (req: Request, res: Response, next: NextFunctio const chatflowId = req.query.chatflowId as string const chatId = req.query.chatId as string const fileName = req.query.fileName as string + + const appServer = getRunningExpressApp() + + // This can be public API, so we can only get orgId from the chatflow + const chatflow = await appServer.AppDataSource.getRepository(ChatFlow).findOneBy({ + id: chatflowId + }) + if (!chatflow) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Chatflow ${chatflowId} not found`) + } + const chatflowWorkspaceId = chatflow.workspaceId + const workspace = await appServer.AppDataSource.getRepository(Workspace).findOneBy({ + id: chatflowWorkspaceId + }) + if (!workspace) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Workspace ${chatflowWorkspaceId} not found`) + } + const orgId = workspace.organizationId as string + res.setHeader('Content-Disposition', contentDisposition(fileName)) - const fileStream = await streamStorageFile(chatflowId, chatId, fileName) + const fileStream = await streamStorageFile(chatflowId, chatId, fileName, orgId) if (!fileStream) throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error: streamStorageFile`) diff --git a/packages/server/src/controllers/log/index.ts b/packages/server/src/controllers/log/index.ts new file mode 100644 index 00000000000..9df968e0619 --- /dev/null +++ b/packages/server/src/controllers/log/index.ts @@ -0,0 +1,16 @@ +import { Request, Response, NextFunction } from 'express' +import logService from '../../services/log' + +// Get logs +const getLogs = async (req: Request, res: Response, next: NextFunction) => { + try { + const apiResponse = await logService.getLogs(req.query?.startDate as string, req.query?.endDate as string) + res.send(apiResponse) + } catch (error) { + next(error) + } +} + +export default { + getLogs +} diff --git a/packages/server/src/controllers/marketplaces/index.ts b/packages/server/src/controllers/marketplaces/index.ts index db947151f18..5d19d40e6cd 100644 --- a/packages/server/src/controllers/marketplaces/index.ts +++ b/packages/server/src/controllers/marketplaces/index.ts @@ -30,7 +30,7 @@ const deleteCustomTemplate = async (req: Request, res: Response, next: NextFunct const getAllCustomTemplates = async (req: Request, res: Response, next: NextFunction) => { try { - const apiResponse = await marketplacesService.getAllCustomTemplates() + const apiResponse = await marketplacesService.getAllCustomTemplates(req.user?.activeWorkspaceId) return res.json(apiResponse) } catch (error) { next(error) @@ -45,7 +45,9 @@ const saveCustomTemplate = async (req: Request, res: Response, next: NextFunctio `Error: marketplacesService.saveCustomTemplate - body not provided!` ) } - const apiResponse = await marketplacesService.saveCustomTemplate(req.body) + const body = req.body + body.workspaceId = req.user?.activeWorkspaceId + const apiResponse = await marketplacesService.saveCustomTemplate(body) return res.json(apiResponse) } catch (error) { next(error) diff --git a/packages/server/src/controllers/nodes/index.ts b/packages/server/src/controllers/nodes/index.ts index 05b72c546d0..5c7295d8d9e 100644 --- a/packages/server/src/controllers/nodes/index.ts +++ b/packages/server/src/controllers/nodes/index.ts @@ -3,6 +3,7 @@ 
import _ from 'lodash' import nodesService from '../../services/nodes' import { InternalFlowiseError } from '../../errors/internalFlowiseError' import { StatusCodes } from 'http-status-codes' +import { getWorkspaceSearchOptionsFromReq } from '../../enterprise/utils/ControllerServiceUtils' const getAllNodes = async (req: Request, res: Response, next: NextFunction) => { try { @@ -67,7 +68,9 @@ const getSingleNodeAsyncOptions = async (req: Request, res: Response, next: Next `Error: nodesController.getSingleNodeAsyncOptions - name not provided!` ) } - const apiResponse = await nodesService.getSingleNodeAsyncOptions(req.params.name, req.body) + const body = req.body + body.searchOptions = getWorkspaceSearchOptionsFromReq(req) + const apiResponse = await nodesService.getSingleNodeAsyncOptions(req.params.name, body) return res.json(apiResponse) } catch (error) { next(error) @@ -82,7 +85,8 @@ const executeCustomFunction = async (req: Request, res: Response, next: NextFunc `Error: nodesController.executeCustomFunction - body not provided!` ) } - const apiResponse = await nodesService.executeCustomFunction(req.body) + const orgId = req.user?.activeOrganizationId + const apiResponse = await nodesService.executeCustomFunction(req.body, orgId) return res.json(apiResponse) } catch (error) { next(error) diff --git a/packages/server/src/controllers/openai-assistants/index.ts b/packages/server/src/controllers/openai-assistants/index.ts index 1b516af8c03..0e5f9140024 100644 --- a/packages/server/src/controllers/openai-assistants/index.ts +++ b/packages/server/src/controllers/openai-assistants/index.ts @@ -5,6 +5,9 @@ import contentDisposition from 'content-disposition' import { InternalFlowiseError } from '../../errors/internalFlowiseError' import { StatusCodes } from 'http-status-codes' import { streamStorageFile } from 'flowise-components' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { ChatFlow } from '../../database/entities/ChatFlow' +import { Workspace } from '../../enterprise/database/entities/workspace.entity' // List available assistants const getAllOpenaiAssistants = async (req: Request, res: Response, next: NextFunction) => { @@ -50,11 +53,29 @@ const getFileFromAssistant = async (req: Request, res: Response, next: NextFunct if (!req.body.chatflowId || !req.body.chatId || !req.body.fileName) { return res.status(500).send(`Invalid file path`) } + const appServer = getRunningExpressApp() const chatflowId = req.body.chatflowId as string const chatId = req.body.chatId as string const fileName = req.body.fileName as string + + // This can be public API, so we can only get orgId from the chatflow + const chatflow = await appServer.AppDataSource.getRepository(ChatFlow).findOneBy({ + id: chatflowId + }) + if (!chatflow) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Chatflow ${chatflowId} not found`) + } + const chatflowWorkspaceId = chatflow.workspaceId + const workspace = await appServer.AppDataSource.getRepository(Workspace).findOneBy({ + id: chatflowWorkspaceId + }) + if (!workspace) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Workspace ${chatflowWorkspaceId} not found`) + } + const orgId = workspace.organizationId as string + res.setHeader('Content-Disposition', contentDisposition(fileName)) - const fileStream = await streamStorageFile(chatflowId, chatId, fileName) + const fileStream = await streamStorageFile(chatflowId, chatId, fileName, orgId) if (!fileStream) throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error: 
getFileFromAssistant`) diff --git a/packages/server/src/controllers/pricing/index.ts b/packages/server/src/controllers/pricing/index.ts new file mode 100644 index 00000000000..3e06034720d --- /dev/null +++ b/packages/server/src/controllers/pricing/index.ts @@ -0,0 +1,81 @@ +import { Request, Response, NextFunction } from 'express' + +const getPricing = async (req: Request, res: Response, next: NextFunction) => { + try { + const PRODUCT_IDS = { + FREE: process.env.CLOUD_FREE_ID, + STARTER: process.env.CLOUD_STARTER_ID, + PRO: process.env.CLOUD_PRO_ID + } + const pricingPlans = [ + { + prodId: PRODUCT_IDS.FREE, + title: 'Free', + subtitle: 'For trying out the platform', + price: '$0', + period: '/month', + features: [ + { text: '2 Flows & Assistants' }, + { text: '100 Predictions / month' }, + { text: '5MB Storage' }, + { text: 'Evaluations & Metrics' }, + { text: 'Custom Embedded Chatbot Branding' }, + { text: 'Community Support' } + ] + }, + { + prodId: PRODUCT_IDS.STARTER, + title: 'Starter', + subtitle: 'For individuals & small teams', + mostPopular: true, + price: '$35', + period: '/month', + features: [ + { text: 'Everything in Free plan, plus' }, + { text: 'Unlimited Flows & Assistants' }, + { text: '10,000 Predictions / month' }, + { text: '1GB Storage' }, + { text: 'Email Support' } + ] + }, + { + prodId: PRODUCT_IDS.PRO, + title: 'Pro', + subtitle: 'For medium-sized businesses', + price: '$65', + period: '/month', + features: [ + { text: 'Everything in Starter plan, plus' }, + { text: '50,000 Predictions / month' }, + { text: '10GB Storage' }, + { text: 'Unlimited Workspaces' }, + { text: '5 users', subtext: '+ $15/user/month' }, + { text: 'Admin Roles & Permissions' }, + { text: 'Priority Support' } + ] + }, + { + title: 'Enterprise', + subtitle: 'For large organizations', + price: 'Contact Us', + features: [ + { text: 'On-Premise Deployment' }, + { text: 'Air-gapped Environments' }, + { text: 'SSO & SAML' }, + { text: 'LDAP & RBAC' }, + { text: 'Versioning' }, + { text: 'Audit Logs' }, + { text: '99.99% Uptime SLA' }, + { text: 'Personalized Support' } + ] + } + ] + return res.status(200).json(pricingPlans) + } catch (error) { + next(error) + } +} + +export default { + getPricing +} diff --git a/packages/server/src/controllers/settings/index.ts b/packages/server/src/controllers/settings/index.ts new file mode 100644 index 00000000000..256ad3013fd --- /dev/null +++ b/packages/server/src/controllers/settings/index.ts @@ -0,0 +1,15 @@ +import { Request, Response, NextFunction } from 'express' +import settingsService from '../../services/settings' + +const getSettingsList = async (req: Request, res: Response, next: NextFunction) => { + try { + const apiResponse = await settingsService.getSettings() + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +export default { + getSettingsList +} diff --git a/packages/server/src/controllers/tools/index.ts b/packages/server/src/controllers/tools/index.ts index 35398d13d3f..e5eda2ee77b 100644 --- a/packages/server/src/controllers/tools/index.ts +++ b/packages/server/src/controllers/tools/index.ts @@ -1,4 +1,4 @@ -import { Request, Response, NextFunction } from 'express' +import { NextFunction, Request, Response } from 'express' import toolsService from '../../services/tools' import { InternalFlowiseError } from '../../errors/internalFlowiseError' import { StatusCodes } from 'http-status-codes' @@ -8,7 +8,18 @@ const createTool = async (req: Request, res: Response, next: NextFunction) => { if (!req.body) { throw new 
InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: toolsController.createTool - body not provided!`) } - const apiResponse = await toolsService.createTool(req.body) + const orgId = req.user?.activeOrganizationId + if (!orgId) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Error: toolsController.createTool - organization ${orgId} not found!`) + } + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Error: toolsController.createTool - workspace ${workspaceId} not found!`) + } + const body = req.body + body.workspaceId = workspaceId + + const apiResponse = await toolsService.createTool(body, orgId) return res.json(apiResponse) } catch (error) { next(error) @@ -29,7 +40,7 @@ const deleteTool = async (req: Request, res: Response, next: NextFunction) => { const getAllTools = async (req: Request, res: Response, next: NextFunction) => { try { - const apiResponse = await toolsService.getAllTools() + const apiResponse = await toolsService.getAllTools(req.user?.activeWorkspaceId) return res.json(apiResponse) } catch (error) { next(error) diff --git a/packages/server/src/controllers/validation/index.ts b/packages/server/src/controllers/validation/index.ts index a73c5c71e83..6ce5da42642 100644 --- a/packages/server/src/controllers/validation/index.ts +++ b/packages/server/src/controllers/validation/index.ts @@ -12,7 +12,8 @@ const checkFlowValidation = async (req: Request, res: Response, next: NextFuncti `Error: validationController.checkFlowValidation - id not provided!` ) } - const apiResponse = await validationService.checkFlowValidation(flowId) + const workspaceId = req.user?.activeWorkspaceId + const apiResponse = await validationService.checkFlowValidation(flowId, workspaceId) return res.json(apiResponse) } catch (error) { next(error) diff --git a/packages/server/src/controllers/variables/index.ts b/packages/server/src/controllers/variables/index.ts index a124255a1ec..3f1417b917d 100644 --- a/packages/server/src/controllers/variables/index.ts +++ b/packages/server/src/controllers/variables/index.ts @@ -12,10 +12,19 @@ const createVariable = async (req: Request, res: Response, next: NextFunction) = `Error: variablesController.createVariable - body not provided!` ) } + const orgId = req.user?.activeOrganizationId + if (!orgId) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Error: variablesController.createVariable - organization ${orgId} not found!`) + } + const workspaceId = req.user?.activeWorkspaceId + if (!workspaceId) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Error: variablesController.createVariable - workspace ${workspaceId} not found!`) + } const body = req.body + body.workspaceId = workspaceId const newVariable = new Variable() Object.assign(newVariable, body) - const apiResponse = await variablesService.createVariable(newVariable) + const apiResponse = await variablesService.createVariable(newVariable, orgId) return res.json(apiResponse) } catch (error) { next(error) @@ -36,7 +45,7 @@ const deleteVariable = async (req: Request, res: Response, next: NextFunction) = const getAllVariables = async (req: Request, res: Response, next: NextFunction) => { try { - const apiResponse = await variablesService.getAllVariables() + const apiResponse = await variablesService.getAllVariables(req.user?.activeWorkspaceId) return res.json(apiResponse) } catch (error) { next(error) diff --git a/packages/server/src/database/entities/ApiKey.ts b/packages/server/src/database/entities/ApiKey.ts index
d96610df20a..e7c1d84e55d 100644 --- a/packages/server/src/database/entities/ApiKey.ts +++ b/packages/server/src/database/entities/ApiKey.ts @@ -18,4 +18,7 @@ export class ApiKey implements IApiKey { @Column({ type: 'timestamp' }) @UpdateDateColumn() updatedDate: Date + + @Column({ nullable: true, type: 'text' }) + workspaceId?: string } diff --git a/packages/server/src/database/entities/Assistant.ts b/packages/server/src/database/entities/Assistant.ts index 2b4a6d3bf4c..28843213928 100644 --- a/packages/server/src/database/entities/Assistant.ts +++ b/packages/server/src/database/entities/Assistant.ts @@ -26,4 +26,7 @@ export class Assistant implements IAssistant { @Column({ type: 'timestamp' }) @UpdateDateColumn() updatedDate: Date + + @Column({ nullable: true, type: 'text' }) + workspaceId?: string } diff --git a/packages/server/src/database/entities/ChatFlow.ts b/packages/server/src/database/entities/ChatFlow.ts index b3a07b82dcc..4c14e99c1c4 100644 --- a/packages/server/src/database/entities/ChatFlow.ts +++ b/packages/server/src/database/entities/ChatFlow.ts @@ -50,4 +50,7 @@ export class ChatFlow implements IChatFlow { @Column({ type: 'timestamp' }) @UpdateDateColumn() updatedDate: Date + + @Column({ nullable: true, type: 'text' }) + workspaceId?: string } diff --git a/packages/server/src/database/entities/Credential.ts b/packages/server/src/database/entities/Credential.ts index daeb0595ee1..2c43158c487 100644 --- a/packages/server/src/database/entities/Credential.ts +++ b/packages/server/src/database/entities/Credential.ts @@ -23,4 +23,7 @@ export class Credential implements ICredential { @Column({ type: 'timestamp' }) @UpdateDateColumn() updatedDate: Date + + @Column({ nullable: true, type: 'text' }) + workspaceId?: string } diff --git a/packages/server/src/database/entities/CustomTemplate.ts b/packages/server/src/database/entities/CustomTemplate.ts index 27b88d78e9b..e45719e69d4 100644 --- a/packages/server/src/database/entities/CustomTemplate.ts +++ b/packages/server/src/database/entities/CustomTemplate.ts @@ -27,6 +27,9 @@ export class CustomTemplate implements ICustomTemplate { @Column({ nullable: true, type: 'text' }) type?: string + @Column({ nullable: true, type: 'text' }) + workspaceId: string + @Column({ type: 'timestamp' }) @CreateDateColumn() createdDate: Date diff --git a/packages/server/src/database/entities/Dataset.ts b/packages/server/src/database/entities/Dataset.ts new file mode 100644 index 00000000000..8dd604d3d01 --- /dev/null +++ b/packages/server/src/database/entities/Dataset.ts @@ -0,0 +1,24 @@ +/* eslint-disable */ +import { Entity, Column, CreateDateColumn, UpdateDateColumn, PrimaryGeneratedColumn } from 'typeorm' +import { IAssistant, IDataset } from '../../Interface' + +@Entity() +export class Dataset implements IDataset { + @PrimaryGeneratedColumn('uuid') + id: string + + @Column({ type: 'text' }) + name: string + + @Column({ type: 'text' }) + description: string + + @CreateDateColumn() + createdDate: Date + + @UpdateDateColumn() + updatedDate: Date + + @Column({ nullable: true, type: 'text' }) + workspaceId?: string +} diff --git a/packages/server/src/database/entities/DatasetRow.ts b/packages/server/src/database/entities/DatasetRow.ts new file mode 100644 index 00000000000..a2a3c1fb3f4 --- /dev/null +++ b/packages/server/src/database/entities/DatasetRow.ts @@ -0,0 +1,25 @@ +/* eslint-disable */ +import { Entity, Column, CreateDateColumn, UpdateDateColumn, PrimaryGeneratedColumn, Index } from 'typeorm' +import { IAssistant, IDataset, IDatasetRow } from 
'../../Interface' + +@Entity() +export class DatasetRow implements IDatasetRow { + @PrimaryGeneratedColumn('uuid') + id: string + + @Column({ type: 'text' }) + @Index() + datasetId: string + + @Column({ type: 'text' }) + input: string + + @Column({ type: 'text' }) + output: string + + @UpdateDateColumn() + updatedDate: Date + + @Column({ name: 'sequence_no' }) + sequenceNo: number +} diff --git a/packages/server/src/database/entities/DocumentStore.ts b/packages/server/src/database/entities/DocumentStore.ts index 694db3e3db9..01babca474b 100644 --- a/packages/server/src/database/entities/DocumentStore.ts +++ b/packages/server/src/database/entities/DocumentStore.ts @@ -37,4 +37,7 @@ export class DocumentStore implements IDocumentStore { @Column({ nullable: true, type: 'text' }) recordManagerConfig: string | null + + @Column({ nullable: true, type: 'text' }) + workspaceId?: string } diff --git a/packages/server/src/database/entities/Evaluation.ts b/packages/server/src/database/entities/Evaluation.ts new file mode 100644 index 00000000000..85128ae0173 --- /dev/null +++ b/packages/server/src/database/entities/Evaluation.ts @@ -0,0 +1,41 @@ +import { Column, Entity, PrimaryGeneratedColumn, UpdateDateColumn } from 'typeorm' +import { IEvaluation } from '../../Interface' + +@Entity() +export class Evaluation implements IEvaluation { + @PrimaryGeneratedColumn('uuid') + id: string + + @Column({ type: 'text' }) + average_metrics: string + + @Column({ type: 'text' }) + additionalConfig: string + + @Column() + name: string + + @Column() + evaluationType: string + + @Column() + chatflowId: string + + @Column() + chatflowName: string + + @Column() + datasetId: string + + @Column() + datasetName: string + + @Column() + status: string + + @UpdateDateColumn() + runDate: Date + + @Column({ nullable: true, type: 'text' }) + workspaceId?: string +} diff --git a/packages/server/src/database/entities/EvaluationRun.ts b/packages/server/src/database/entities/EvaluationRun.ts new file mode 100644 index 00000000000..531ec0095e0 --- /dev/null +++ b/packages/server/src/database/entities/EvaluationRun.ts @@ -0,0 +1,35 @@ +import { Column, Entity, PrimaryGeneratedColumn, UpdateDateColumn } from 'typeorm' +import { IEvaluationRun } from '../../Interface' + +@Entity() +export class EvaluationRun implements IEvaluationRun { + @PrimaryGeneratedColumn('uuid') + id: string + + @Column() + evaluationId: string + + @Column({ type: 'text' }) + input: string + + @Column({ type: 'text' }) + expectedOutput: string + + @UpdateDateColumn() + runDate: Date + + @Column({ type: 'text' }) + actualOutput: string + + @Column({ type: 'text' }) + metrics: string + + @Column({ type: 'text' }) + llmEvaluators: string + + @Column({ type: 'text' }) + evaluators: string + + @Column({ type: 'text' }) + errors: string +} diff --git a/packages/server/src/database/entities/Evaluator.ts b/packages/server/src/database/entities/Evaluator.ts new file mode 100644 index 00000000000..8e7f6f9b962 --- /dev/null +++ b/packages/server/src/database/entities/Evaluator.ts @@ -0,0 +1,28 @@ +import { Column, CreateDateColumn, Entity, PrimaryGeneratedColumn, UpdateDateColumn } from 'typeorm' +import { IEvaluator } from '../../Interface' + +//1714808591644 + +@Entity() +export class Evaluator implements IEvaluator { + @PrimaryGeneratedColumn('uuid') + id: string + + @Column() + name: string + + @Column() + type: string + + @Column() + config: string + + @CreateDateColumn() + createdDate: Date + + @UpdateDateColumn() + updatedDate: Date + + @Column({ nullable: true, 
type: 'text' }) + workspaceId?: string +} diff --git a/packages/server/src/database/entities/Execution.ts b/packages/server/src/database/entities/Execution.ts index 483a10ff17b..87885cf8a3f 100644 --- a/packages/server/src/database/entities/Execution.ts +++ b/packages/server/src/database/entities/Execution.ts @@ -18,7 +18,7 @@ export class Execution implements IExecution { agentflowId: string @Index() - @Column({ type: 'uuid' }) + @Column({ type: 'varchar' }) sessionId: string @Column({ nullable: true, type: 'text' }) @@ -41,4 +41,7 @@ export class Execution implements IExecution { @ManyToOne(() => ChatFlow) @JoinColumn({ name: 'agentflowId' }) agentflow: ChatFlow + + @Column({ nullable: true, type: 'text' }) + workspaceId?: string } diff --git a/packages/server/src/database/entities/Tool.ts b/packages/server/src/database/entities/Tool.ts index 49f7335e1cc..3a0dcbc898a 100644 --- a/packages/server/src/database/entities/Tool.ts +++ b/packages/server/src/database/entities/Tool.ts @@ -32,4 +32,7 @@ export class Tool implements ITool { @Column({ type: 'timestamp' }) @UpdateDateColumn() updatedDate: Date + + @Column({ nullable: true, type: 'text' }) + workspaceId?: string } diff --git a/packages/server/src/database/entities/Variable.ts b/packages/server/src/database/entities/Variable.ts index 2437e824513..6a8006dd67e 100644 --- a/packages/server/src/database/entities/Variable.ts +++ b/packages/server/src/database/entities/Variable.ts @@ -23,4 +23,7 @@ export class Variable implements IVariable { @Column({ type: 'timestamp' }) @UpdateDateColumn() updatedDate: Date + + @Column({ nullable: true, type: 'text' }) + workspaceId?: string } diff --git a/packages/server/src/database/entities/index.ts b/packages/server/src/database/entities/index.ts index c9152a1d7fa..b65ea28b58a 100644 --- a/packages/server/src/database/entities/index.ts +++ b/packages/server/src/database/entities/index.ts @@ -9,9 +9,22 @@ import { DocumentStore } from './DocumentStore' import { DocumentStoreFileChunk } from './DocumentStoreFileChunk' import { Lead } from './Lead' import { UpsertHistory } from './UpsertHistory' +import { Dataset } from './Dataset' +import { DatasetRow } from './DatasetRow' +import { EvaluationRun } from './EvaluationRun' +import { Evaluation } from './Evaluation' +import { Evaluator } from './Evaluator' import { ApiKey } from './ApiKey' import { CustomTemplate } from './CustomTemplate' import { Execution } from './Execution' +import { LoginActivity, WorkspaceShared, WorkspaceUsers } from '../../enterprise/database/entities/EnterpriseEntities' +import { User } from '../../enterprise/database/entities/user.entity' +import { Organization } from '../../enterprise/database/entities/organization.entity' +import { Role } from '../../enterprise/database/entities/role.entity' +import { OrganizationUser } from '../../enterprise/database/entities/organization-user.entity' +import { Workspace } from '../../enterprise/database/entities/workspace.entity' +import { WorkspaceUser } from '../../enterprise/database/entities/workspace-user.entity' +import { LoginMethod } from '../../enterprise/database/entities/login-method.entity' export const entities = { ChatFlow, @@ -21,11 +34,26 @@ export const entities = { Tool, Assistant, Variable, + UpsertHistory, DocumentStore, DocumentStoreFileChunk, Lead, - UpsertHistory, + Dataset, + DatasetRow, + Evaluation, + EvaluationRun, + Evaluator, ApiKey, + User, + WorkspaceUsers, + LoginActivity, + WorkspaceShared, CustomTemplate, - Execution + Execution, + Organization, + Role, + 
OrganizationUser, + Workspace, + WorkspaceUser, + LoginMethod } diff --git a/packages/server/src/database/migrations/mariadb/1714548873039-AddEvaluation.ts b/packages/server/src/database/migrations/mariadb/1714548873039-AddEvaluation.ts new file mode 100644 index 00000000000..d061d34762d --- /dev/null +++ b/packages/server/src/database/migrations/mariadb/1714548873039-AddEvaluation.ts @@ -0,0 +1,41 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddEvaluation1714548873039 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS \`evaluation\` ( + \`id\` varchar(36) NOT NULL, + \`chatflowId\` LONGTEXT NOT NULL, + \`datasetId\` LONGTEXT NOT NULL, + \`name\` varchar(255) NOT NULL, + \`chatflowName\` varchar(255) NOT NULL, + \`datasetName\` varchar(255) NOT NULL, + \`additionalConfig\` LONGTEXT, + \`average_metrics\` LONGTEXT NOT NULL, + \`status\` varchar(10) NOT NULL, + \`evaluationType\` varchar(20) NOT NULL, + \`runDate\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6), + PRIMARY KEY (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_520_ci;` + ) + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS \`evaluation_run\` ( + \`id\` varchar(36) NOT NULL, + \`evaluationId\` varchar(36) NOT NULL, + \`expectedOutput\` LONGTEXT NOT NULL, + \`actualOutput\` LONGTEXT NOT NULL, + \`evaluators\` LONGTEXT, + \`input\` LONGTEXT DEFAULT NULL, + \`metrics\` TEXT DEFAULT NULL, + \`llmEvaluators\` TEXT DEFAULT NULL, + \`runDate\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6), + PRIMARY KEY (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_520_ci;` + ) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE evaluation`) + await queryRunner.query(`DROP TABLE evaluation_run`) + } +} diff --git a/packages/server/src/database/migrations/mariadb/1714548903384-AddDataset.ts b/packages/server/src/database/migrations/mariadb/1714548903384-AddDataset.ts new file mode 100644 index 00000000000..641834e10f0 --- /dev/null +++ b/packages/server/src/database/migrations/mariadb/1714548903384-AddDataset.ts @@ -0,0 +1,31 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddDatasets1714548903384 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS \`dataset\` ( + \`id\` varchar(36) NOT NULL, + \`name\` varchar(255) NOT NULL, + \`description\` varchar(255) DEFAULT NULL, + \`createdDate\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6), + \`updatedDate\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6), + PRIMARY KEY (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_520_ci;` + ) + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS \`dataset_row\` ( + \`id\` varchar(36) NOT NULL, + \`datasetId\` varchar(36) NOT NULL, + \`input\` LONGTEXT NOT NULL, + \`output\` LONGTEXT DEFAULT NULL, + \`updatedDate\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6), + PRIMARY KEY (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_520_ci;` + ) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE dataset`) + await queryRunner.query(`DROP TABLE dataset_row`) + } +} diff --git 
a/packages/server/src/database/migrations/mariadb/1714808591644-AddEvaluator.ts b/packages/server/src/database/migrations/mariadb/1714808591644-AddEvaluator.ts new file mode 100644 index 00000000000..81d081570f6 --- /dev/null +++ b/packages/server/src/database/migrations/mariadb/1714808591644-AddEvaluator.ts @@ -0,0 +1,21 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddEvaluator1714808591644 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS \`evaluator\` ( + \`id\` varchar(36) NOT NULL, + \`name\` varchar(255) NOT NULL, + \`type\` varchar(25) DEFAULT NULL, + \`config\` LONGTEXT DEFAULT NULL, + \`createdDate\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6), + \`updatedDate\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6), + PRIMARY KEY (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_520_ci;` + ) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE evaluator`) + } +} diff --git a/packages/server/src/database/migrations/mariadb/1733752119696-AddSeqNoToDatasetRow.ts b/packages/server/src/database/migrations/mariadb/1733752119696-AddSeqNoToDatasetRow.ts new file mode 100644 index 00000000000..e6ec131f06d --- /dev/null +++ b/packages/server/src/database/migrations/mariadb/1733752119696-AddSeqNoToDatasetRow.ts @@ -0,0 +1,12 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddSeqNoToDatasetRow1733752119696 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + const columnExists = await queryRunner.hasColumn('dataset_row', 'sequence_no') + if (!columnExists) queryRunner.query(`ALTER TABLE \`dataset_row\` ADD COLUMN \`sequence_no\` INT DEFAULT -1;`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "dataset_row" DROP COLUMN "sequence_no";`) + } +} diff --git a/packages/server/src/database/migrations/mariadb/1743758056188-FixOpenSourceAssistantTable.ts b/packages/server/src/database/migrations/mariadb/1743758056188-FixOpenSourceAssistantTable.ts new file mode 100644 index 00000000000..54138c3906f --- /dev/null +++ b/packages/server/src/database/migrations/mariadb/1743758056188-FixOpenSourceAssistantTable.ts @@ -0,0 +1,24 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { Assistant } from '../../entities/Assistant' + +export class FixOpenSourceAssistantTable1743758056188 implements MigrationInterface { + name = 'FixOpenSourceAssistantTable1743758056188' + + public async up(queryRunner: QueryRunner): Promise { + const columnExists = await queryRunner.hasColumn('assistant', 'type') + if (!columnExists) { + await queryRunner.query(`ALTER TABLE \`assistant\` ADD COLUMN \`type\` TEXT;`) + await queryRunner.query(`UPDATE \`assistant\` SET \`type\` = 'OPENAI';`) + + const assistants: Assistant[] = await queryRunner.query(`SELECT * FROM \`assistant\`;`) + for (let assistant of assistants) { + const details = JSON.parse(assistant.details) + if (!details?.id) await queryRunner.query(`UPDATE \`assistant\` SET \`type\` = 'CUSTOM' WHERE id = '${assistant.id}';`) + } + } + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE \`assistant\` DROP COLUMN \`type\`;`) + } +} diff --git a/packages/server/src/database/migrations/mariadb/1744964560174-AddErrorToEvaluationRun.ts 
b/packages/server/src/database/migrations/mariadb/1744964560174-AddErrorToEvaluationRun.ts new file mode 100644 index 00000000000..9a5d6488c56 --- /dev/null +++ b/packages/server/src/database/migrations/mariadb/1744964560174-AddErrorToEvaluationRun.ts @@ -0,0 +1,12 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddErrorToEvaluationRun1744964560174 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + const columnExists = await queryRunner.hasColumn('evaluation_run', 'errors') + if (!columnExists) queryRunner.query(`ALTER TABLE \`evaluation_run\` ADD COLUMN \`errors\` LONGTEXT NULL DEFAULT '[]';`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "evaluation_run" DROP COLUMN "errors";`) + } +} diff --git a/packages/server/src/database/migrations/mariadb/1747902489801-ModifyExecutionDataColumnType.ts b/packages/server/src/database/migrations/mariadb/1747902489801-ModifyExecutionDataColumnType.ts new file mode 100644 index 00000000000..e0afad3ed7c --- /dev/null +++ b/packages/server/src/database/migrations/mariadb/1747902489801-ModifyExecutionDataColumnType.ts @@ -0,0 +1,11 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class ModifyExecutionDataColumnType1747902489801 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + queryRunner.query(`ALTER TABLE \`execution\` MODIFY COLUMN \`executionData\` LONGTEXT NOT NULL;`) + } + + public async down(queryRunner: QueryRunner): Promise { + queryRunner.query(`ALTER TABLE \`execution\` MODIFY COLUMN \`executionData\` TEXT NOT NULL;`) + } +} diff --git a/packages/server/src/database/migrations/mariadb/index.ts b/packages/server/src/database/migrations/mariadb/index.ts index 11d7741789b..272a6bb1ff2 100644 --- a/packages/server/src/database/migrations/mariadb/index.ts +++ b/packages/server/src/database/migrations/mariadb/index.ts @@ -17,9 +17,12 @@ import { AddFeedback1707213626553 } from './1707213626553-AddFeedback' import { AddUpsertHistoryEntity1709814301358 } from './1709814301358-AddUpsertHistoryEntity' import { AddLead1710832127079 } from './1710832127079-AddLead' import { AddLeadToChatMessage1711538023578 } from './1711538023578-AddLeadToChatMessage' -import { AddVectorStoreConfigToDocStore1715861032479 } from './1715861032479-AddVectorStoreConfigToDocStore' import { AddDocumentStore1711637331047 } from './1711637331047-AddDocumentStore' +import { AddEvaluation1714548873039 } from './1714548873039-AddEvaluation' +import { AddDatasets1714548903384 } from './1714548903384-AddDataset' import { AddAgentReasoningToChatMessage1714679514451 } from './1714679514451-AddAgentReasoningToChatMessage' +import { AddEvaluator1714808591644 } from './1714808591644-AddEvaluator' +import { AddVectorStoreConfigToDocStore1715861032479 } from './1715861032479-AddVectorStoreConfigToDocStore' import { AddTypeToChatFlow1716300000000 } from './1716300000000-AddTypeToChatFlow' import { AddApiKey1720230151480 } from './1720230151480-AddApiKey' import { AddActionToChatMessage1721078251523 } from './1721078251523-AddActionToChatMessage' @@ -28,7 +31,23 @@ import { AddCustomTemplate1725629836652 } from './1725629836652-AddCustomTemplat import { AddArtifactsToChatMessage1726156258465 } from './1726156258465-AddArtifactsToChatMessage' import { AddFollowUpPrompts1726666318346 } from './1726666318346-AddFollowUpPrompts' import { AddTypeToAssistant1733011290987 } from './1733011290987-AddTypeToAssistant' 
+import { AddSeqNoToDatasetRow1733752119696 } from './1733752119696-AddSeqNoToDatasetRow' import { AddExecutionEntity1738090872625 } from './1738090872625-AddExecutionEntity' +import { FixOpenSourceAssistantTable1743758056188 } from './1743758056188-FixOpenSourceAssistantTable' +import { AddErrorToEvaluationRun1744964560174 } from './1744964560174-AddErrorToEvaluationRun' +import { ModifyExecutionDataColumnType1747902489801 } from './1747902489801-ModifyExecutionDataColumnType' + +import { AddAuthTables1720230151482 } from '../../../enterprise/database/migrations/mariadb/1720230151482-AddAuthTables' +import { AddWorkspace1725437498242 } from '../../../enterprise/database/migrations/mariadb/1725437498242-AddWorkspace' +import { AddWorkspaceShared1726654922034 } from '../../../enterprise/database/migrations/mariadb/1726654922034-AddWorkspaceShared' +import { AddWorkspaceIdToCustomTemplate1726655750383 } from '../../../enterprise/database/migrations/mariadb/1726655750383-AddWorkspaceIdToCustomTemplate' +import { AddOrganization1727798417345 } from '../../../enterprise/database/migrations/mariadb/1727798417345-AddOrganization' +import { LinkWorkspaceId1729130948686 } from '../../../enterprise/database/migrations/mariadb/1729130948686-LinkWorkspaceId' +import { LinkOrganizationId1729133111652 } from '../../../enterprise/database/migrations/mariadb/1729133111652-LinkOrganizationId' +import { AddSSOColumns1730519457880 } from '../../../enterprise/database/migrations/mariadb/1730519457880-AddSSOColumns' +import { AddPersonalWorkspace1734074497540 } from '../../../enterprise/database/migrations/mariadb/1734074497540-AddPersonalWorkspace' +import { RefactorEnterpriseDatabase1737076223692 } from '../../../enterprise/database/migrations/mariadb/1737076223692-RefactorEnterpriseDatabase' +import { ExecutionLinkWorkspaceId1746862866554 } from '../../../enterprise/database/migrations/mariadb/1746862866554-ExecutionLinkWorkspaceId' export const mariadbMigrations = [ Init1693840429259, @@ -51,15 +70,33 @@ export const mariadbMigrations = [ AddDocumentStore1711637331047, AddLead1710832127079, AddLeadToChatMessage1711538023578, + AddEvaluation1714548873039, + AddDatasets1714548903384, AddAgentReasoningToChatMessage1714679514451, - AddTypeToChatFlow1716300000000, + AddEvaluator1714808591644, AddVectorStoreConfigToDocStore1715861032479, + AddTypeToChatFlow1716300000000, AddApiKey1720230151480, AddActionToChatMessage1721078251523, LongTextColumn1722301395521, AddCustomTemplate1725629836652, - AddArtifactsToChatMessage1726156258465, AddFollowUpPrompts1726666318346, AddTypeToAssistant1733011290987, - AddExecutionEntity1738090872625 + AddArtifactsToChatMessage1726156258465, + AddAuthTables1720230151482, + AddWorkspace1725437498242, + AddWorkspaceShared1726654922034, + AddWorkspaceIdToCustomTemplate1726655750383, + AddOrganization1727798417345, + LinkWorkspaceId1729130948686, + LinkOrganizationId1729133111652, + AddSSOColumns1730519457880, + AddSeqNoToDatasetRow1733752119696, + AddPersonalWorkspace1734074497540, + RefactorEnterpriseDatabase1737076223692, + AddExecutionEntity1738090872625, + FixOpenSourceAssistantTable1743758056188, + AddErrorToEvaluationRun1744964560174, + ExecutionLinkWorkspaceId1746862866554, + ModifyExecutionDataColumnType1747902489801 ] diff --git a/packages/server/src/database/migrations/mysql/1714548873039-AddEvaluation.ts b/packages/server/src/database/migrations/mysql/1714548873039-AddEvaluation.ts new file mode 100644 index 00000000000..0a41682f65a --- /dev/null +++ 
b/packages/server/src/database/migrations/mysql/1714548873039-AddEvaluation.ts @@ -0,0 +1,41 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddEvaluation1714548873039 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS \`evaluation\` ( + \`id\` varchar(36) NOT NULL, + \`chatflowId\` LONGTEXT NOT NULL, + \`datasetId\` LONGTEXT NOT NULL, + \`name\` varchar(255) NOT NULL, + \`chatflowName\` varchar(255) NOT NULL, + \`datasetName\` varchar(255) NOT NULL, + \`additionalConfig\` LONGTEXT, + \`average_metrics\` LONGTEXT NOT NULL, + \`status\` varchar(10) NOT NULL, + \`evaluationType\` varchar(20) NOT NULL, + \`runDate\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6), + PRIMARY KEY (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;` + ) + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS \`evaluation_run\` ( + \`id\` varchar(36) NOT NULL, + \`evaluationId\` varchar(36) NOT NULL, + \`expectedOutput\` LONGTEXT NOT NULL, + \`actualOutput\` LONGTEXT NOT NULL, + \`evaluators\` LONGTEXT, + \`input\` LONGTEXT DEFAULT NULL, + \`metrics\` TEXT DEFAULT NULL, + \`llmEvaluators\` TEXT DEFAULT NULL, + \`runDate\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6), + PRIMARY KEY (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;` + ) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE evaluation`) + await queryRunner.query(`DROP TABLE evaluation_run`) + } +} diff --git a/packages/server/src/database/migrations/mysql/1714548903384-AddDataset.ts b/packages/server/src/database/migrations/mysql/1714548903384-AddDataset.ts new file mode 100644 index 00000000000..e6ca36f94ef --- /dev/null +++ b/packages/server/src/database/migrations/mysql/1714548903384-AddDataset.ts @@ -0,0 +1,31 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddDatasets1714548903384 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS \`dataset\` ( + \`id\` varchar(36) NOT NULL, + \`name\` varchar(255) NOT NULL, + \`description\` varchar(255) DEFAULT NULL, + \`createdDate\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6), + \`updatedDate\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6), + PRIMARY KEY (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;` + ) + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS \`dataset_row\` ( + \`id\` varchar(36) NOT NULL, + \`datasetId\` varchar(36) NOT NULL, + \`input\` LONGTEXT NOT NULL, + \`output\` LONGTEXT DEFAULT NULL, + \`updatedDate\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6), + PRIMARY KEY (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;` + ) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE dataset`) + await queryRunner.query(`DROP TABLE dataset_row`) + } +} diff --git a/packages/server/src/database/migrations/mysql/1714808591644-AddEvaluator.ts b/packages/server/src/database/migrations/mysql/1714808591644-AddEvaluator.ts new file mode 100644 index 00000000000..4c9e63dac9d --- /dev/null +++ b/packages/server/src/database/migrations/mysql/1714808591644-AddEvaluator.ts @@ -0,0 +1,21 @@ +import { 
MigrationInterface, QueryRunner } from 'typeorm' + +export class AddEvaluator1714808591644 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS \`evaluator\` ( + \`id\` varchar(36) NOT NULL, + \`name\` varchar(255) NOT NULL, + \`type\` varchar(25) DEFAULT NULL, + \`config\` LONGTEXT DEFAULT NULL, + \`createdDate\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6), + \`updatedDate\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6), + PRIMARY KEY (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;` + ) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE evaluator`) + } +} diff --git a/packages/server/src/database/migrations/mysql/1733752119696-AddSeqNoToDatasetRow.ts b/packages/server/src/database/migrations/mysql/1733752119696-AddSeqNoToDatasetRow.ts new file mode 100644 index 00000000000..f3d7f72bbd8 --- /dev/null +++ b/packages/server/src/database/migrations/mysql/1733752119696-AddSeqNoToDatasetRow.ts @@ -0,0 +1,12 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddSeqNoToDatasetRow1733752119696 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + const columnExists = await queryRunner.hasColumn('dataset_row', 'sequence_no') + if (!columnExists) queryRunner.query(`ALTER TABLE \`dataset_row\` ADD COLUMN \`sequence_no\` INT DEFAULT -1;`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "dataset_row" DROP COLUMN "sequence_no";`) + } +} diff --git a/packages/server/src/database/migrations/mysql/1743758056188-FixOpenSourceAssistantTable.ts b/packages/server/src/database/migrations/mysql/1743758056188-FixOpenSourceAssistantTable.ts new file mode 100644 index 00000000000..54138c3906f --- /dev/null +++ b/packages/server/src/database/migrations/mysql/1743758056188-FixOpenSourceAssistantTable.ts @@ -0,0 +1,24 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { Assistant } from '../../entities/Assistant' + +export class FixOpenSourceAssistantTable1743758056188 implements MigrationInterface { + name = 'FixOpenSourceAssistantTable1743758056188' + + public async up(queryRunner: QueryRunner): Promise { + const columnExists = await queryRunner.hasColumn('assistant', 'type') + if (!columnExists) { + await queryRunner.query(`ALTER TABLE \`assistant\` ADD COLUMN \`type\` TEXT;`) + await queryRunner.query(`UPDATE \`assistant\` SET \`type\` = 'OPENAI';`) + + const assistants: Assistant[] = await queryRunner.query(`SELECT * FROM \`assistant\`;`) + for (let assistant of assistants) { + const details = JSON.parse(assistant.details) + if (!details?.id) await queryRunner.query(`UPDATE \`assistant\` SET \`type\` = 'CUSTOM' WHERE id = '${assistant.id}';`) + } + } + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE \`assistant\` DROP COLUMN \`type\`;`) + } +} diff --git a/packages/server/src/database/migrations/mysql/1744964560174-AddErrorToEvaluationRun.ts b/packages/server/src/database/migrations/mysql/1744964560174-AddErrorToEvaluationRun.ts new file mode 100644 index 00000000000..32fb4218f9d --- /dev/null +++ b/packages/server/src/database/migrations/mysql/1744964560174-AddErrorToEvaluationRun.ts @@ -0,0 +1,12 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddErrorToEvaluationRun1744964560174 
implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + const columnExists = await queryRunner.hasColumn('evaluation_run', 'errors') + if (!columnExists) queryRunner.query(`ALTER TABLE \`evaluation_run\` ADD COLUMN \`errors\` LONGTEXT NULL DEFAULT ('[]');`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "evaluation_run" DROP COLUMN "errors";`) + } +} diff --git a/packages/server/src/database/migrations/mysql/1746437114935-FixErrorsColumnInEvaluationRun.ts b/packages/server/src/database/migrations/mysql/1746437114935-FixErrorsColumnInEvaluationRun.ts new file mode 100644 index 00000000000..d9f80f86962 --- /dev/null +++ b/packages/server/src/database/migrations/mysql/1746437114935-FixErrorsColumnInEvaluationRun.ts @@ -0,0 +1,12 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class FixErrorsColumnInEvaluationRun1746437114935 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + const columnExists = await queryRunner.hasColumn('evaluation_run', 'errors') + if (!columnExists) queryRunner.query(`ALTER TABLE \`evaluation_run\` ADD COLUMN \`errors\` LONGTEXT NULL DEFAULT ('[]');`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "evaluation_run" DROP COLUMN "errors";`) + } +} diff --git a/packages/server/src/database/migrations/mysql/1747902489801-ModifyExecutionDataColumnType.ts b/packages/server/src/database/migrations/mysql/1747902489801-ModifyExecutionDataColumnType.ts new file mode 100644 index 00000000000..e0afad3ed7c --- /dev/null +++ b/packages/server/src/database/migrations/mysql/1747902489801-ModifyExecutionDataColumnType.ts @@ -0,0 +1,11 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class ModifyExecutionDataColumnType1747902489801 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + queryRunner.query(`ALTER TABLE \`execution\` MODIFY COLUMN \`executionData\` LONGTEXT NOT NULL;`) + } + + public async down(queryRunner: QueryRunner): Promise { + queryRunner.query(`ALTER TABLE \`execution\` MODIFY COLUMN \`executionData\` TEXT NOT NULL;`) + } +} diff --git a/packages/server/src/database/migrations/mysql/index.ts b/packages/server/src/database/migrations/mysql/index.ts index fcd0541311e..c51ebb8a945 100644 --- a/packages/server/src/database/migrations/mysql/index.ts +++ b/packages/server/src/database/migrations/mysql/index.ts @@ -17,9 +17,12 @@ import { AddFeedback1707213626553 } from './1707213626553-AddFeedback' import { AddUpsertHistoryEntity1709814301358 } from './1709814301358-AddUpsertHistoryEntity' import { AddLead1710832127079 } from './1710832127079-AddLead' import { AddLeadToChatMessage1711538023578 } from './1711538023578-AddLeadToChatMessage' -import { AddVectorStoreConfigToDocStore1715861032479 } from './1715861032479-AddVectorStoreConfigToDocStore' import { AddDocumentStore1711637331047 } from './1711637331047-AddDocumentStore' +import { AddEvaluation1714548873039 } from './1714548873039-AddEvaluation' +import { AddDatasets1714548903384 } from './1714548903384-AddDataset' import { AddAgentReasoningToChatMessage1714679514451 } from './1714679514451-AddAgentReasoningToChatMessage' +import { AddEvaluator1714808591644 } from './1714808591644-AddEvaluator' +import { AddVectorStoreConfigToDocStore1715861032479 } from './1715861032479-AddVectorStoreConfigToDocStore' import { AddTypeToChatFlow1716300000000 } from 
'./1716300000000-AddTypeToChatFlow' import { AddApiKey1720230151480 } from './1720230151480-AddApiKey' import { AddActionToChatMessage1721078251523 } from './1721078251523-AddActionToChatMessage' @@ -28,7 +31,24 @@ import { AddCustomTemplate1725629836652 } from './1725629836652-AddCustomTemplat import { AddArtifactsToChatMessage1726156258465 } from './1726156258465-AddArtifactsToChatMessage' import { AddFollowUpPrompts1726666302024 } from './1726666302024-AddFollowUpPrompts' import { AddTypeToAssistant1733011290987 } from './1733011290987-AddTypeToAssistant' +import { AddSeqNoToDatasetRow1733752119696 } from './1733752119696-AddSeqNoToDatasetRow' import { AddExecutionEntity1738090872625 } from './1738090872625-AddExecutionEntity' +import { FixOpenSourceAssistantTable1743758056188 } from './1743758056188-FixOpenSourceAssistantTable' +import { AddErrorToEvaluationRun1744964560174 } from './1744964560174-AddErrorToEvaluationRun' +import { FixErrorsColumnInEvaluationRun1746437114935 } from './1746437114935-FixErrorsColumnInEvaluationRun' +import { ModifyExecutionDataColumnType1747902489801 } from './1747902489801-ModifyExecutionDataColumnType' + +import { AddAuthTables1720230151482 } from '../../../enterprise/database/migrations/mysql/1720230151482-AddAuthTables' +import { AddWorkspace1720230151484 } from '../../../enterprise/database/migrations/mysql/1720230151484-AddWorkspace' +import { AddWorkspaceShared1726654922034 } from '../../../enterprise/database/migrations/mysql/1726654922034-AddWorkspaceShared' +import { AddWorkspaceIdToCustomTemplate1726655750383 } from '../../../enterprise/database/migrations/mysql/1726655750383-AddWorkspaceIdToCustomTemplate' +import { AddOrganization1727798417345 } from '../../../enterprise/database/migrations/mysql/1727798417345-AddOrganization' +import { LinkWorkspaceId1729130948686 } from '../../../enterprise/database/migrations/mysql/1729130948686-LinkWorkspaceId' +import { LinkOrganizationId1729133111652 } from '../../../enterprise/database/migrations/mysql/1729133111652-LinkOrganizationId' +import { AddSSOColumns1730519457880 } from '../../../enterprise/database/migrations/mysql/1730519457880-AddSSOColumns' +import { AddPersonalWorkspace1734074497540 } from '../../../enterprise/database/migrations/mysql/1734074497540-AddPersonalWorkspace' +import { RefactorEnterpriseDatabase1737076223692 } from '../../../enterprise/database/migrations/mysql/1737076223692-RefactorEnterpriseDatabase' +import { ExecutionLinkWorkspaceId1746862866554 } from '../../../enterprise/database/migrations/mysql/1746862866554-ExecutionLinkWorkspaceId' export const mysqlMigrations = [ Init1693840429259, @@ -48,12 +68,15 @@ export const mysqlMigrations = [ AddSpeechToText1706364937060, AddUpsertHistoryEntity1709814301358, AddFeedback1707213626553, + AddEvaluation1714548873039, + AddDatasets1714548903384, + AddEvaluator1714808591644, AddDocumentStore1711637331047, AddLead1710832127079, AddLeadToChatMessage1711538023578, AddAgentReasoningToChatMessage1714679514451, - AddTypeToChatFlow1716300000000, AddVectorStoreConfigToDocStore1715861032479, + AddTypeToChatFlow1716300000000, AddApiKey1720230151480, AddActionToChatMessage1721078251523, LongTextColumn1722301395521, @@ -61,5 +84,21 @@ export const mysqlMigrations = [ AddArtifactsToChatMessage1726156258465, AddFollowUpPrompts1726666302024, AddTypeToAssistant1733011290987, - AddExecutionEntity1738090872625 + AddAuthTables1720230151482, + AddWorkspace1720230151484, + AddWorkspaceShared1726654922034, + 
AddWorkspaceIdToCustomTemplate1726655750383, + AddOrganization1727798417345, + LinkWorkspaceId1729130948686, + LinkOrganizationId1729133111652, + AddSSOColumns1730519457880, + AddSeqNoToDatasetRow1733752119696, + AddPersonalWorkspace1734074497540, + RefactorEnterpriseDatabase1737076223692, + FixOpenSourceAssistantTable1743758056188, + AddExecutionEntity1738090872625, + AddErrorToEvaluationRun1744964560174, + FixErrorsColumnInEvaluationRun1746437114935, + ExecutionLinkWorkspaceId1746862866554, + ModifyExecutionDataColumnType1747902489801 ] diff --git a/packages/server/src/database/migrations/postgres/1714548873039-AddEvaluation.ts b/packages/server/src/database/migrations/postgres/1714548873039-AddEvaluation.ts new file mode 100644 index 00000000000..7a6a6aa06ff --- /dev/null +++ b/packages/server/src/database/migrations/postgres/1714548873039-AddEvaluation.ts @@ -0,0 +1,41 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddEvaluation1714548873039 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS evaluation ( + id uuid NOT NULL DEFAULT uuid_generate_v4(), + "name" varchar NOT NULL, + "chatflowId" text NOT NULL, + "chatflowName" text NOT NULL, + "datasetId" varchar NOT NULL, + "datasetName" varchar NOT NULL, + "additionalConfig" text NULL, + "evaluationType" varchar NOT NULL, + "status" varchar NOT NULL, + "average_metrics" text NULL, + "runDate" timestamp NOT NULL DEFAULT now(), + CONSTRAINT "PK_98989043dd804f54-9830ab99f8" PRIMARY KEY (id) + );` + ) + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS evaluation_run ( + id uuid NOT NULL DEFAULT uuid_generate_v4(), + "evaluationId" varchar NOT NULL, + "input" text NOT NULL, + "expectedOutput" text NULL, + "actualOutput" text NULL, + "evaluators" text NULL, + "llmEvaluators" text DEFAULT NULL, + "metrics" text NULL, + "runDate" timestamp NOT NULL DEFAULT now(), + CONSTRAINT "PK_98989927dd804f54-9840ab23f8" PRIMARY KEY (id) + );` + ) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE evaluation`) + await queryRunner.query(`DROP TABLE evaluation_run`) + } +} diff --git a/packages/server/src/database/migrations/postgres/1714548903384-AddDataset.ts b/packages/server/src/database/migrations/postgres/1714548903384-AddDataset.ts new file mode 100644 index 00000000000..0fadef3069e --- /dev/null +++ b/packages/server/src/database/migrations/postgres/1714548903384-AddDataset.ts @@ -0,0 +1,31 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddDatasets1714548903384 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS dataset ( + id uuid NOT NULL DEFAULT uuid_generate_v4(), + "name" varchar NOT NULL, + "description" varchar NULL, + "createdDate" timestamp NOT NULL DEFAULT now(), + "updatedDate" timestamp NOT NULL DEFAULT now(), + CONSTRAINT "PK_98419043dd804f54-9830ab99f8" PRIMARY KEY (id) + );` + ) + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS dataset_row ( + id uuid NOT NULL DEFAULT uuid_generate_v4(), + "datasetId" varchar NOT NULL, + "input" text NOT NULL, + "output" text NULL, + "updatedDate" timestamp NOT NULL DEFAULT now(), + CONSTRAINT "PK_98909027dd804f54-9840ab99f8" PRIMARY KEY (id) + );` + ) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE dataset`) + await queryRunner.query(`DROP TABLE 
dataset_row`) + } +} diff --git a/packages/server/src/database/migrations/postgres/1714808591644-AddEvaluator.ts b/packages/server/src/database/migrations/postgres/1714808591644-AddEvaluator.ts new file mode 100644 index 00000000000..a228e0c8fc1 --- /dev/null +++ b/packages/server/src/database/migrations/postgres/1714808591644-AddEvaluator.ts @@ -0,0 +1,21 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddEvaluator1714808591644 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS evaluator ( + id uuid NOT NULL DEFAULT uuid_generate_v4(), + "name" varchar NOT NULL, + "type" text NULL, + "config" text NULL, + "createdDate" timestamp NOT NULL DEFAULT now(), + "updatedDate" timestamp NOT NULL DEFAULT now(), + CONSTRAINT "PK_90019043dd804f54-9830ab11f8" PRIMARY KEY (id) + );` + ) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE evaluator`) + } +} diff --git a/packages/server/src/database/migrations/postgres/1733752119696-AddSeqNoToDatasetRow.ts b/packages/server/src/database/migrations/postgres/1733752119696-AddSeqNoToDatasetRow.ts new file mode 100644 index 00000000000..a49a6881652 --- /dev/null +++ b/packages/server/src/database/migrations/postgres/1733752119696-AddSeqNoToDatasetRow.ts @@ -0,0 +1,11 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddSeqNoToDatasetRow1733752119696 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "dataset_row" ADD COLUMN IF NOT EXISTS "sequence_no" integer DEFAULT -1;`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "dataset_row" DROP COLUMN "sequence_no";`) + } +} diff --git a/packages/server/src/database/migrations/postgres/1743758056188-FixOpenSourceAssistantTable.ts b/packages/server/src/database/migrations/postgres/1743758056188-FixOpenSourceAssistantTable.ts new file mode 100644 index 00000000000..5400a1907dd --- /dev/null +++ b/packages/server/src/database/migrations/postgres/1743758056188-FixOpenSourceAssistantTable.ts @@ -0,0 +1,24 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { Assistant } from '../../entities/Assistant' + +export class FixOpenSourceAssistantTable1743758056188 implements MigrationInterface { + name = 'FixOpenSourceAssistantTable1743758056188' + + public async up(queryRunner: QueryRunner): Promise { + const columnExists = await queryRunner.hasColumn('assistant', 'type') + if (!columnExists) { + await queryRunner.query(`ALTER TABLE "assistant" ADD COLUMN "type" TEXT;`) + await queryRunner.query(`UPDATE "assistant" SET "type" = 'OPENAI';`) + + const assistants: Assistant[] = await queryRunner.query(`SELECT * FROM "assistant";`) + for (let assistant of assistants) { + const details = JSON.parse(assistant.details) + if (!details?.id) await queryRunner.query(`UPDATE "assistant" SET "type" = 'CUSTOM' WHERE id = '${assistant.id}';`) + } + } + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "assistant" DROP COLUMN "type";`) + } +} diff --git a/packages/server/src/database/migrations/postgres/1744964560174-AddErrorToEvaluationRun.ts b/packages/server/src/database/migrations/postgres/1744964560174-AddErrorToEvaluationRun.ts new file mode 100644 index 00000000000..9cb47a57eb9 --- /dev/null +++ 
b/packages/server/src/database/migrations/postgres/1744964560174-AddErrorToEvaluationRun.ts @@ -0,0 +1,11 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddErrorToEvaluationRun1744964560174 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "evaluation_run" ADD COLUMN IF NOT EXISTS "errors" TEXT NULL DEFAULT '[]';`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "evaluation_run" DROP COLUMN "errors";`) + } +} diff --git a/packages/server/src/database/migrations/postgres/1748450230238-ModifyExecutionSessionIdFieldType.ts b/packages/server/src/database/migrations/postgres/1748450230238-ModifyExecutionSessionIdFieldType.ts new file mode 100644 index 00000000000..43656ff724f --- /dev/null +++ b/packages/server/src/database/migrations/postgres/1748450230238-ModifyExecutionSessionIdFieldType.ts @@ -0,0 +1,13 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class ModifyExecutionSessionIdFieldType1748450230238 implements MigrationInterface { + name = 'ModifyExecutionSessionIdFieldType1748450230238' + + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "execution" ALTER COLUMN "sessionId" type varchar USING "sessionId"::varchar`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "execution" ALTER COLUMN "sessionId" type uuid USING "sessionId"::uuid`) + } +} diff --git a/packages/server/src/database/migrations/postgres/index.ts b/packages/server/src/database/migrations/postgres/index.ts index ac8fef734dd..4da17daa4ab 100644 --- a/packages/server/src/database/migrations/postgres/index.ts +++ b/packages/server/src/database/migrations/postgres/index.ts @@ -18,9 +18,12 @@ import { AddUpsertHistoryEntity1709814301358 } from './1709814301358-AddUpsertHi import { FieldTypes1710497452584 } from './1710497452584-FieldTypes' import { AddLead1710832137905 } from './1710832137905-AddLead' import { AddLeadToChatMessage1711538016098 } from './1711538016098-AddLeadToChatMessage' -import { AddVectorStoreConfigToDocStore1715861032479 } from './1715861032479-AddVectorStoreConfigToDocStore' import { AddDocumentStore1711637331047 } from './1711637331047-AddDocumentStore' +import { AddEvaluation1714548873039 } from './1714548873039-AddEvaluation' +import { AddDatasets1714548903384 } from './1714548903384-AddDataset' import { AddAgentReasoningToChatMessage1714679514451 } from './1714679514451-AddAgentReasoningToChatMessage' +import { AddEvaluator1714808591644 } from './1714808591644-AddEvaluator' +import { AddVectorStoreConfigToDocStore1715861032479 } from './1715861032479-AddVectorStoreConfigToDocStore' import { AddTypeToChatFlow1716300000000 } from './1716300000000-AddTypeToChatFlow' import { AddApiKey1720230151480 } from './1720230151480-AddApiKey' import { AddActionToChatMessage1721078251523 } from './1721078251523-AddActionToChatMessage' @@ -28,7 +31,23 @@ import { AddCustomTemplate1725629836652 } from './1725629836652-AddCustomTemplat import { AddArtifactsToChatMessage1726156258465 } from './1726156258465-AddArtifactsToChatMessage' import { AddFollowUpPrompts1726666309552 } from './1726666309552-AddFollowUpPrompts' import { AddTypeToAssistant1733011290987 } from './1733011290987-AddTypeToAssistant' +import { AddSeqNoToDatasetRow1733752119696 } from './1733752119696-AddSeqNoToDatasetRow' import { AddExecutionEntity1738090872625 } from 
'./1738090872625-AddExecutionEntity' +import { FixOpenSourceAssistantTable1743758056188 } from './1743758056188-FixOpenSourceAssistantTable' +import { AddErrorToEvaluationRun1744964560174 } from './1744964560174-AddErrorToEvaluationRun' +import { ModifyExecutionSessionIdFieldType1748450230238 } from './1748450230238-ModifyExecutionSessionIdFieldType' + +import { AddAuthTables1720230151482 } from '../../../enterprise/database/migrations/postgres/1720230151482-AddAuthTables' +import { AddWorkspace1720230151484 } from '../../../enterprise/database/migrations/postgres/1720230151484-AddWorkspace' +import { AddWorkspaceShared1726654922034 } from '../../../enterprise/database/migrations/postgres/1726654922034-AddWorkspaceShared' +import { AddWorkspaceIdToCustomTemplate1726655750383 } from '../../../enterprise/database/migrations/postgres/1726655750383-AddWorkspaceIdToCustomTemplate' +import { AddOrganization1727798417345 } from '../../../enterprise/database/migrations/postgres/1727798417345-AddOrganization' +import { LinkWorkspaceId1729130948686 } from '../../../enterprise/database/migrations/postgres/1729130948686-LinkWorkspaceId' +import { LinkOrganizationId1729133111652 } from '../../../enterprise/database/migrations/postgres/1729133111652-LinkOrganizationId' +import { AddSSOColumns1730519457880 } from '../../../enterprise/database/migrations/postgres/1730519457880-AddSSOColumns' +import { AddPersonalWorkspace1734074497540 } from '../../../enterprise/database/migrations/postgres/1734074497540-AddPersonalWorkspace' +import { RefactorEnterpriseDatabase1737076223692 } from '../../../enterprise/database/migrations/postgres/1737076223692-RefactorEnterpriseDatabase' +import { ExecutionLinkWorkspaceId1746862866554 } from '../../../enterprise/database/migrations/postgres/1746862866554-ExecutionLinkWorkspaceId' export const postgresMigrations = [ Init1693891895163, @@ -49,17 +68,35 @@ export const postgresMigrations = [ AddUpsertHistoryEntity1709814301358, AddFeedback1707213601923, FieldTypes1710497452584, + AddEvaluation1714548873039, + AddDatasets1714548903384, + AddEvaluator1714808591644, AddDocumentStore1711637331047, AddLead1710832137905, AddLeadToChatMessage1711538016098, AddAgentReasoningToChatMessage1714679514451, - AddTypeToChatFlow1716300000000, AddVectorStoreConfigToDocStore1715861032479, + AddTypeToChatFlow1716300000000, AddApiKey1720230151480, AddActionToChatMessage1721078251523, AddCustomTemplate1725629836652, AddArtifactsToChatMessage1726156258465, AddFollowUpPrompts1726666309552, AddTypeToAssistant1733011290987, - AddExecutionEntity1738090872625 + AddAuthTables1720230151482, + AddWorkspace1720230151484, + AddWorkspaceShared1726654922034, + AddWorkspaceIdToCustomTemplate1726655750383, + AddOrganization1727798417345, + LinkWorkspaceId1729130948686, + LinkOrganizationId1729133111652, + AddSSOColumns1730519457880, + AddSeqNoToDatasetRow1733752119696, + AddPersonalWorkspace1734074497540, + RefactorEnterpriseDatabase1737076223692, + AddExecutionEntity1738090872625, + FixOpenSourceAssistantTable1743758056188, + AddErrorToEvaluationRun1744964560174, + ExecutionLinkWorkspaceId1746862866554, + ModifyExecutionSessionIdFieldType1748450230238 ] diff --git a/packages/server/src/database/migrations/sqlite/1714548873039-AddEvaluation.ts b/packages/server/src/database/migrations/sqlite/1714548873039-AddEvaluation.ts new file mode 100644 index 00000000000..9b1da5ebabc --- /dev/null +++ b/packages/server/src/database/migrations/sqlite/1714548873039-AddEvaluation.ts @@ -0,0 +1,37 @@ +import { 
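Aside (not part of the diff): the index files touched here export plain arrays such as postgresMigrations, which a TypeORM DataSource then consumes. The wiring is not shown in this diff, so the sketch below is an assumption; the import path and connection settings are placeholders.

    import { DataSource } from 'typeorm'
    import { postgresMigrations } from './database/migrations/postgres' // import path assumed for illustration

    const dataSource = new DataSource({
        type: 'postgres',
        host: 'localhost',      // placeholder connection settings
        port: 5432,
        username: 'flowise',
        password: 'flowise',
        database: 'flowise',
        migrations: postgresMigrations // the registered migrations are applied by runMigrations()
    })

    async function migrate(): Promise<void> {
        await dataSource.initialize()
        await dataSource.runMigrations()
        await dataSource.destroy()
    }

    migrate().catch((err) => {
        console.error(err)
        process.exit(1)
    })
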
MigrationInterface, QueryRunner } from 'typeorm' + +export class AddEvaluation1714548873039 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS "evaluation" ( + "id" varchar PRIMARY KEY NOT NULL, + "name" varchar NOT NULL, + "chatflowId" text NOT NULL, + "chatflowName" text NOT NULL, + "datasetId" varchar NOT NULL, + "datasetName" varchar NOT NULL, + "additionalConfig" text, + "status" varchar NOT NULL, + "evaluationType" varchar, + "average_metrics" text, + "runDate" datetime NOT NULL DEFAULT (datetime('now')));` + ) + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS "evaluation_run" ( + "id" varchar PRIMARY KEY NOT NULL, + "evaluationId" text NOT NULL, + "input" text NOT NULL, + "expectedOutput" text NOT NULL, + "actualOutput" text NOT NULL, + "evaluators" text, + "llmEvaluators" TEXT DEFAULT NULL, + "metrics" text NULL, + "runDate" datetime NOT NULL DEFAULT (datetime('now')));` + ) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE evaluation`) + await queryRunner.query(`DROP TABLE evaluation_run`) + } +} diff --git a/packages/server/src/database/migrations/sqlite/1714548903384-AddDataset.ts b/packages/server/src/database/migrations/sqlite/1714548903384-AddDataset.ts new file mode 100644 index 00000000000..b59abea1429 --- /dev/null +++ b/packages/server/src/database/migrations/sqlite/1714548903384-AddDataset.ts @@ -0,0 +1,25 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddDatasets1714548903384 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS "dataset" ("id" varchar PRIMARY KEY NOT NULL, + "name" text NOT NULL, + "description" varchar, + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')));` + ) + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS "dataset_row" ("id" varchar PRIMARY KEY NOT NULL, + "datasetId" text NOT NULL, + "input" text NOT NULL, + "output" text NOT NULL, + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')));` + ) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE dataset`) + await queryRunner.query(`DROP TABLE dataset_row`) + } +} diff --git a/packages/server/src/database/migrations/sqlite/1714808591644-AddEvaluator.ts b/packages/server/src/database/migrations/sqlite/1714808591644-AddEvaluator.ts new file mode 100644 index 00000000000..af85cf27183 --- /dev/null +++ b/packages/server/src/database/migrations/sqlite/1714808591644-AddEvaluator.ts @@ -0,0 +1,18 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddEvaluator1714808591644 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS "evaluator" ("id" varchar PRIMARY KEY NOT NULL, +"name" text NOT NULL, +"type" varchar, +"config" text, +"createdDate" datetime NOT NULL DEFAULT (datetime('now')), +"updatedDate" datetime NOT NULL DEFAULT (datetime('now')));` + ) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE evaluator`) + } +} diff --git a/packages/server/src/database/migrations/sqlite/1733752119696-AddSeqNoToDatasetRow.ts b/packages/server/src/database/migrations/sqlite/1733752119696-AddSeqNoToDatasetRow.ts new file mode 100644 index 
00000000000..8d1cb35605a --- /dev/null +++ b/packages/server/src/database/migrations/sqlite/1733752119696-AddSeqNoToDatasetRow.ts @@ -0,0 +1,11 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddSeqNoToDatasetRow1733752119696 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "dataset_row" ADD COLUMN "sequence_no" integer DEFAULT -1;`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "dataset_row" DROP COLUMN "sequence_no";`) + } +} diff --git a/packages/server/src/database/migrations/sqlite/1743758056188-FixOpenSourceAssistantTable.ts b/packages/server/src/database/migrations/sqlite/1743758056188-FixOpenSourceAssistantTable.ts new file mode 100644 index 00000000000..61c611e2e5d --- /dev/null +++ b/packages/server/src/database/migrations/sqlite/1743758056188-FixOpenSourceAssistantTable.ts @@ -0,0 +1,28 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { Assistant } from '../../entities/Assistant' + +export async function fixOpenSourceAssistantTable(queryRunner: QueryRunner): Promise { + const columnExists = await queryRunner.hasColumn('assistant', 'type') + if (!columnExists) { + await queryRunner.query(`ALTER TABLE "assistant" ADD COLUMN "type" TEXT;`) + await queryRunner.query(`UPDATE "assistant" SET "type" = 'OPENAI';`) + + const assistants: Assistant[] = await queryRunner.query(`SELECT * FROM "assistant";`) + for (let assistant of assistants) { + const details = JSON.parse(assistant.details) + if (!details?.id) await queryRunner.query(`UPDATE "assistant" SET "type" = 'CUSTOM' WHERE id = '${assistant.id}';`) + } + } +} + +export class FixOpenSourceAssistantTable1743758056188 implements MigrationInterface { + name = 'FixOpenSourceAssistantTable1743758056188' + + public async up(queryRunner: QueryRunner): Promise { + await fixOpenSourceAssistantTable(queryRunner) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "assistant" DROP COLUMN "type";`) + } +} diff --git a/packages/server/src/database/migrations/sqlite/1744964560174-AddErrorToEvaluationRun.ts b/packages/server/src/database/migrations/sqlite/1744964560174-AddErrorToEvaluationRun.ts new file mode 100644 index 00000000000..8f576aa1ddb --- /dev/null +++ b/packages/server/src/database/migrations/sqlite/1744964560174-AddErrorToEvaluationRun.ts @@ -0,0 +1,11 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddErrorToEvaluationRun1744964560174 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "evaluation_run" ADD COLUMN "errors" TEXT NULL DEFAULT '[]';`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "evaluation_run" DROP COLUMN "errors";`) + } +} diff --git a/packages/server/src/database/migrations/sqlite/index.ts b/packages/server/src/database/migrations/sqlite/index.ts index 4ebcbb92152..0b15e26938f 100644 --- a/packages/server/src/database/migrations/sqlite/index.ts +++ b/packages/server/src/database/migrations/sqlite/index.ts @@ -17,17 +17,35 @@ import { AddFeedback1707213619308 } from './1707213619308-AddFeedback' import { AddUpsertHistoryEntity1709814301358 } from './1709814301358-AddUpsertHistoryEntity' import { AddLead1710832117612 } from './1710832117612-AddLead' import { AddLeadToChatMessage1711537986113 } from 
'./1711537986113-AddLeadToChatMessage' -import { AddVectorStoreConfigToDocStore1715861032479 } from './1715861032479-AddVectorStoreConfigToDocStore' import { AddDocumentStore1711637331047 } from './1711637331047-AddDocumentStore' +import { AddEvaluation1714548873039 } from './1714548873039-AddEvaluation' +import { AddDatasets1714548903384 } from './1714548903384-AddDataset' import { AddAgentReasoningToChatMessage1714679514451 } from './1714679514451-AddAgentReasoningToChatMessage' +import { AddEvaluator1714808591644 } from './1714808591644-AddEvaluator' +import { AddVectorStoreConfigToDocStore1715861032479 } from './1715861032479-AddVectorStoreConfigToDocStore' import { AddTypeToChatFlow1716300000000 } from './1716300000000-AddTypeToChatFlow' import { AddApiKey1720230151480 } from './1720230151480-AddApiKey' import { AddActionToChatMessage1721078251523 } from './1721078251523-AddActionToChatMessage' -import { AddArtifactsToChatMessage1726156258465 } from './1726156258465-AddArtifactsToChatMessage' import { AddCustomTemplate1725629836652 } from './1725629836652-AddCustomTemplate' +import { AddArtifactsToChatMessage1726156258465 } from './1726156258465-AddArtifactsToChatMessage' import { AddFollowUpPrompts1726666294213 } from './1726666294213-AddFollowUpPrompts' import { AddTypeToAssistant1733011290987 } from './1733011290987-AddTypeToAssistant' +import { AddSeqNoToDatasetRow1733752119696 } from './1733752119696-AddSeqNoToDatasetRow' import { AddExecutionEntity1738090872625 } from './1738090872625-AddExecutionEntity' +import { FixOpenSourceAssistantTable1743758056188 } from './1743758056188-FixOpenSourceAssistantTable' +import { AddErrorToEvaluationRun1744964560174 } from './1744964560174-AddErrorToEvaluationRun' + +import { AddAuthTables1720230151482 } from '../../../enterprise/database/migrations/sqlite/1720230151482-AddAuthTables' +import { AddWorkspace1720230151484 } from '../../../enterprise/database/migrations/sqlite/1720230151484-AddWorkspace' +import { AddWorkspaceShared1726654922034 } from '../../../enterprise/database/migrations/sqlite/1726654922034-AddWorkspaceShared' +import { AddWorkspaceIdToCustomTemplate1726655750383 } from '../../../enterprise/database/migrations/sqlite/1726655750383-AddWorkspaceIdToCustomTemplate' +import { AddOrganization1727798417345 } from '../../../enterprise/database/migrations/sqlite/1727798417345-AddOrganization' +import { LinkWorkspaceId1729130948686 } from '../../../enterprise/database/migrations/sqlite/1729130948686-LinkWorkspaceId' +import { LinkOrganizationId1729133111652 } from '../../../enterprise/database/migrations/sqlite/1729133111652-LinkOrganizationId' +import { AddSSOColumns1730519457880 } from '../../../enterprise/database/migrations/sqlite/1730519457880-AddSSOColumns' +import { AddPersonalWorkspace1734074497540 } from '../../../enterprise/database/migrations/sqlite/1734074497540-AddPersonalWorkspace' +import { RefactorEnterpriseDatabase1737076223692 } from '../../../enterprise/database/migrations/sqlite/1737076223692-RefactorEnterpriseDatabase' +import { ExecutionLinkWorkspaceId1746862866554 } from '../../../enterprise/database/migrations/sqlite/1746862866554-ExecutionLinkWorkspaceId' export const sqliteMigrations = [ Init1693835579790, @@ -46,18 +64,35 @@ export const sqliteMigrations = [ AddFileUploadsToChatMessage1701788586491, AddSpeechToText1706364937060, AddUpsertHistoryEntity1709814301358, + AddEvaluation1714548873039, + AddDatasets1714548903384, + AddEvaluator1714808591644, AddFeedback1707213619308, 
AddDocumentStore1711637331047, AddLead1710832117612, AddLeadToChatMessage1711537986113, AddAgentReasoningToChatMessage1714679514451, - AddTypeToChatFlow1716300000000, AddVectorStoreConfigToDocStore1715861032479, + AddTypeToChatFlow1716300000000, AddApiKey1720230151480, AddActionToChatMessage1721078251523, AddArtifactsToChatMessage1726156258465, - AddCustomTemplate1725629836652, AddFollowUpPrompts1726666294213, AddTypeToAssistant1733011290987, - AddExecutionEntity1738090872625 + AddCustomTemplate1725629836652, + AddAuthTables1720230151482, + AddWorkspace1720230151484, + AddWorkspaceShared1726654922034, + AddWorkspaceIdToCustomTemplate1726655750383, + AddOrganization1727798417345, + LinkWorkspaceId1729130948686, + LinkOrganizationId1729133111652, + AddSSOColumns1730519457880, + AddSeqNoToDatasetRow1733752119696, + AddPersonalWorkspace1734074497540, + RefactorEnterpriseDatabase1737076223692, + AddExecutionEntity1738090872625, + FixOpenSourceAssistantTable1743758056188, + AddErrorToEvaluationRun1744964560174, + ExecutionLinkWorkspaceId1746862866554 ] diff --git a/packages/server/src/enterprise/Interface.Enterprise.ts b/packages/server/src/enterprise/Interface.Enterprise.ts new file mode 100644 index 00000000000..0e0482d9365 --- /dev/null +++ b/packages/server/src/enterprise/Interface.Enterprise.ts @@ -0,0 +1,133 @@ +import { z } from 'zod' + +export enum UserStatus { + INVITED = 'invited', + DISABLED = 'disabled', + ACTIVE = 'active' +} + +export class IUser { + id: string + email: string + name: string + credential: string + status: UserStatus + tempToken: string + tokenExpiry?: Date + role: string + lastLogin: Date + activeWorkspaceId: string + isApiKeyValidated?: boolean + loginMode?: string + activeOrganizationId?: string +} + +export interface IWorkspaceUser { + id: string + workspaceId: string + userId: string + role: string +} + +export interface IWorkspaceShared { + id: string + workspaceId: string + sharedItemId: string + itemType: string + createdDate: Date + updatedDate: Date +} + +export interface ILoginActivity { + id: string + username: string + activityCode: number + message: string + loginMode: string + attemptedDateTime: Date +} + +export enum LoginActivityCode { + LOGIN_SUCCESS = 0, + LOGOUT_SUCCESS = 1, + UNKNOWN_USER = -1, + INCORRECT_CREDENTIAL = -2, + USER_DISABLED = -3, + NO_ASSIGNED_WORKSPACE = -4, + INVALID_LOGIN_MODE = -5, + REGISTRATION_PENDING = -6, + UNKNOWN_ERROR = -99 +} + +export type IAssignedWorkspace = { id: string; name: string; role: string; organizationId: string } +export type LoggedInUser = { + id: string + email: string + name: string + roleId: string + activeOrganizationId: string + activeOrganizationSubscriptionId: string + activeOrganizationCustomerId: string + activeOrganizationProductId: string + isOrganizationAdmin: boolean + activeWorkspaceId: string + activeWorkspace: string + assignedWorkspaces: IAssignedWorkspace[] + isApiKeyValidated: boolean + permissions?: string[] + features?: Record + ssoRefreshToken?: string + ssoToken?: string + ssoProvider?: string +} + +export enum ErrorMessage { + INVALID_MISSING_TOKEN = 'Invalid or Missing token', + TOKEN_EXPIRED = 'Token Expired', + REFRESH_TOKEN_EXPIRED = 'Refresh Token Expired', + FORBIDDEN = 'Forbidden', + UNKNOWN_USER = 'Unknown Username or Password', + INCORRECT_PASSWORD = 'Incorrect Password', + INACTIVE_USER = 'Inactive User', + INVITED_USER = 'User Invited, but has not registered', + INVALID_WORKSPACE = 'No Workspace Assigned', + UNKNOWN_ERROR = 'Unknown Error' +} + +// IMPORTANT: update 
the schema on the client side as well +// packages/ui/src/views/organization/index.jsx +export const OrgSetupSchema = z + .object({ + orgName: z.string().min(1, 'Organization name is required'), + username: z.string().min(1, 'Name is required'), + email: z.string().min(1, 'Email is required').email('Invalid email address'), + password: z + .string() + .min(8, 'Password must be at least 8 characters') + .regex(/[A-Z]/, 'Password must contain at least one uppercase letter') + .regex(/[!@#$%^&*]/, 'Password must contain at least one special character'), + confirmPassword: z.string().min(1, 'Confirm Password is required') + }) + .refine((data) => data.password === data.confirmPassword, { + message: "Passwords don't match", + path: ['confirmPassword'] + }) + +// IMPORTANT: when updating this schema, update the schema on the server as well +// packages/ui/src/views/auth/register.jsx +export const RegisterUserSchema = z + .object({ + username: z.string().min(1, 'Name is required'), + email: z.string().min(1, 'Email is required').email('Invalid email address'), + password: z + .string() + .min(8, 'Password must be at least 8 characters') + .regex(/[A-Z]/, 'Password must contain at least one uppercase letter') + .regex(/[!@#$%^&*]/, 'Password must contain at least one special character'), + confirmPassword: z.string().min(1, 'Confirm Password is required'), + token: z.string().min(1, 'Invite Code is required') + }) + .refine((data) => data.password === data.confirmPassword, { + message: "Passwords don't match", + path: ['confirmPassword'] + }) diff --git a/packages/server/src/enterprise/LICENSE.md b/packages/server/src/enterprise/LICENSE.md new file mode 100644 index 00000000000..462c48dc6de --- /dev/null +++ b/packages/server/src/enterprise/LICENSE.md @@ -0,0 +1,46 @@ +The FlowiseAI Inc Commercial License (the "Commercial License") +Copyright (c) 2023-present FlowiseAI, Inc + +With regard to the FlowiseAI Inc Software: + +This software and associated documentation files (the "Software") may only be +used in production, if you (and any entity that you represent) have agreed to, +and are in compliance with, the FlowiseAI Inc Subscription Terms available +at https://flowiseai.com/terms, or other agreements governing +the use of the Software, as mutually agreed by you and FlowiseAI Inc, Inc ("FlowiseAI"), +and otherwise have a valid FlowiseAI Inc Enterprise Edition subscription ("Commercial Subscription") +for the correct number of hosts as defined in the "Commercial Terms ("Hosts"). Subject to the foregoing sentence, +you are free to modify this Software and publish patches to the Software. You agree +that FlowiseAI Inc and/or its licensors (as applicable) retain all right, title and interest in +and to all such modifications and/or patches, and all such modifications and/or +patches may only be used, copied, modified, displayed, distributed, or otherwise +exploited with a valid Commercial Subscription for the correct number of hosts. +Notwithstanding the foregoing, you may copy and modify the Software for development +and testing purposes, without requiring a subscription. You agree that FlowiseAI Inc and/or +its licensors (as applicable) retain all right, title and interest in and to all such +modifications. You are not granted any other rights beyond what is expressly stated herein. +Subject to the foregoing, it is forbidden to copy, merge, publish, distribute, sublicense, +and/or sell the Software. 
+ +This Commercial License applies only to the part of this Software that is not distributed under +the Apache 2.0 license. The Open Source version of Flowise is licensed under the Apache License, Version 2.0. +Unauthorized copying, modification, distribution, or use of the Enterprise and Cloud versions +is strictly prohibited without a valid license agreement from FlowiseAI, Inc. + +For information about licensing of the Enterprise and Cloud versions, please contact: +security@flowiseai.com + +The full text of this Commercial License shall +be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +For all third party components incorporated into the FlowiseAI Inc Software, those +components are licensed under the original license provided by the owner of the +applicable component. diff --git a/packages/server/src/enterprise/controllers/account.controller.ts b/packages/server/src/enterprise/controllers/account.controller.ts new file mode 100644 index 00000000000..f29af60fc0c --- /dev/null +++ b/packages/server/src/enterprise/controllers/account.controller.ts @@ -0,0 +1,161 @@ +import { Request, Response, NextFunction } from 'express' +import { StatusCodes } from 'http-status-codes' +import { AccountService } from '../services/account.service' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import axios from 'axios' + +export class AccountController { + public async register(req: Request, res: Response, next: NextFunction) { + try { + const accountService = new AccountService() + const data = await accountService.register(req.body) + return res.status(StatusCodes.CREATED).json(data) + } catch (error) { + next(error) + } + } + + public async invite(req: Request, res: Response, next: NextFunction) { + try { + const accountService = new AccountService() + const data = await accountService.invite(req.body, req.user) + return res.status(StatusCodes.CREATED).json(data) + } catch (error) { + next(error) + } + } + + public async login(req: Request, res: Response, next: NextFunction) { + try { + const accountService = new AccountService() + const data = await accountService.login(req.body) + return res.status(StatusCodes.CREATED).json(data) + } catch (error) { + next(error) + } + } + + public async verify(req: Request, res: Response, next: NextFunction) { + try { + const accountService = new AccountService() + const data = await accountService.verify(req.body) + return res.status(StatusCodes.CREATED).json(data) + } catch (error) { + next(error) + } + } + + public async resendVerificationEmail(req: Request, res: Response, next: NextFunction) { + try { + const accountService = new AccountService() + const data = await accountService.resendVerificationEmail(req.body) + return res.status(StatusCodes.CREATED).json(data) + } catch (error) { + next(error) + } + } + + public async forgotPassword(req: Request, res: Response, next: NextFunction) { + try { + const accountService = new AccountService() + const data = await accountService.forgotPassword(req.body) + return res.status(StatusCodes.CREATED).json(data) 
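Aside (not part of the diff): the AccountService internals are not included in this PR, but the RegisterUserSchema exported from Interface.Enterprise.ts above would typically be applied to the request body with zod's safeParse before an account is created. A hypothetical sketch; the import path and the way errors are surfaced are assumptions.

    import { RegisterUserSchema } from '../Interface.Enterprise' // path assumed relative to the controllers folder

    function validateRegisterBody(body: unknown) {
        const result = RegisterUserSchema.safeParse(body)
        if (!result.success) {
            // zod reports every failed rule (password length, uppercase, special character, mismatch, ...)
            const errors = result.error.issues.map((issue) => issue.message)
            return { valid: false as const, errors }
        }
        return { valid: true as const, data: result.data }
    }

    // Example: a weak password fails several rules at once
    console.log(validateRegisterBody({ username: 'Jane', email: 'jane@example.com', password: 'short', confirmPassword: 'short', token: 'abc' }))
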
+ } catch (error) { + next(error) + } + } + + public async resetPassword(req: Request, res: Response, next: NextFunction) { + try { + const accountService = new AccountService() + const data = await accountService.resetPassword(req.body) + return res.status(StatusCodes.CREATED).json(data) + } catch (error) { + next(error) + } + } + + public async createStripeCustomerPortalSession(req: Request, res: Response, next: NextFunction) { + try { + const { url: portalSessionUrl } = await getRunningExpressApp().identityManager.createStripeCustomerPortalSession(req) + return res.status(StatusCodes.OK).json({ url: portalSessionUrl }) + } catch (error) { + next(error) + } + } + + public async cancelPreviousCloudSubscrption(req: Request, res: Response, next: NextFunction) { + try { + const { email } = req.body + if (!email) { + return res.status(StatusCodes.BAD_REQUEST).json({ message: 'Email is required' }) + } + + const headers = { + 'Content-Type': 'application/json', + Accept: 'application/json' + } + + const response = await axios.post(`${process.env.ENGINE_URL}/cancel-subscription`, { email }, { headers }) + + if (response.status === 200) { + return res.status(StatusCodes.OK).json(response.data) + } else { + return res.status(response.status).json(response.data) + } + } catch (error) { + next(error) + } + } + + public async logout(req: Request, res: Response, next: NextFunction) { + try { + if (req.user) { + const accountService = new AccountService() + await accountService.logout(req.user) + if (req.isAuthenticated()) { + req.logout((err) => { + if (err) { + return res.status(500).json({ message: 'Logout failed' }) + } + req.session.destroy((err) => { + if (err) { + return res.status(500).json({ message: 'Failed to destroy session' }) + } + }) + }) + } else { + // For JWT-based users (owner, org_admin) + res.clearCookie('connect.sid') // Clear the session cookie + res.clearCookie('token') // Clear the JWT cookie + res.clearCookie('refreshToken') // Clear the JWT cookie + return res.redirect('/login') // Redirect to the login page + } + } + return res.status(200).json({ message: 'logged_out', redirectTo: `/login` }) + } catch (error) { + next(error) + } + } + + public async getBasicAuth(req: Request, res: Response) { + if (process.env.FLOWISE_USERNAME && process.env.FLOWISE_PASSWORD) { + return res.status(StatusCodes.OK).json({ + isUsernamePasswordSet: true + }) + } else { + return res.status(StatusCodes.OK).json({ + isUsernamePasswordSet: false + }) + } + } + + public async checkBasicAuth(req: Request, res: Response) { + const { username, password } = req.body + if (username === process.env.FLOWISE_USERNAME && password === process.env.FLOWISE_PASSWORD) { + return res.json({ message: 'Authentication successful' }) + } else { + return res.json({ message: 'Authentication failed' }) + } + } +} diff --git a/packages/server/src/enterprise/controllers/audit/index.ts b/packages/server/src/enterprise/controllers/audit/index.ts new file mode 100644 index 00000000000..6c95ca310fb --- /dev/null +++ b/packages/server/src/enterprise/controllers/audit/index.ts @@ -0,0 +1,33 @@ +import { NextFunction, Request, Response } from 'express' +import auditService from '../../services/audit' +import { InternalFlowiseError } from '../../../errors/internalFlowiseError' +import { StatusCodes } from 'http-status-codes' + +const fetchLoginActivity = async (req: Request, res: Response, next: NextFunction) => { + try { + if (typeof req.body === 'undefined') { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, 
`Error: auditService.fetchLoginHistory - body not provided!`) + } + const apiResponse = await auditService.fetchLoginActivity(req.body) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +const deleteLoginActivity = async (req: Request, res: Response, next: NextFunction) => { + try { + if (typeof req.body === 'undefined') { + throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: auditService.deleteLoginHistory - body not provided!`) + } + const apiResponse = await auditService.deleteLoginActivity(req.body) + return res.json(apiResponse) + } catch (error) { + next(error) + } +} + +export default { + fetchLoginActivity, + deleteLoginActivity +} diff --git a/packages/server/src/enterprise/controllers/auth/index.ts b/packages/server/src/enterprise/controllers/auth/index.ts new file mode 100644 index 00000000000..583eb8d116d --- /dev/null +++ b/packages/server/src/enterprise/controllers/auth/index.ts @@ -0,0 +1,15 @@ +import { NextFunction, Request, Response } from 'express' +import { getRunningExpressApp } from '../../../utils/getRunningExpressApp' + +const getAllPermissions = async (req: Request, res: Response, next: NextFunction) => { + try { + const appServer = getRunningExpressApp() + return res.json(appServer.identityManager.getPermissions()) + } catch (error) { + next(error) + } +} + +export default { + getAllPermissions +} diff --git a/packages/server/src/enterprise/controllers/login-method.controller.ts b/packages/server/src/enterprise/controllers/login-method.controller.ts new file mode 100644 index 00000000000..e7c9330f319 --- /dev/null +++ b/packages/server/src/enterprise/controllers/login-method.controller.ts @@ -0,0 +1,142 @@ +import { NextFunction, Request, Response } from 'express' +import { StatusCodes } from 'http-status-codes' +import { LoginMethodErrorMessage, LoginMethodService } from '../services/login-method.service' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { LoginMethod, LoginMethodStatus } from '../database/entities/login-method.entity' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { decrypt } from '../utils/encryption.util' +import AzureSSO from '../sso/AzureSSO' +import GoogleSSO from '../sso/GoogleSSO' +import Auth0SSO from '../sso/Auth0SSO' +import { OrganizationService } from '../services/organization.service' +import { Platform } from '../../Interface' +import GithubSSO from '../sso/GithubSSO' + +export class LoginMethodController { + public async create(req: Request, res: Response, next: NextFunction) { + try { + const loginMethodService = new LoginMethodService() + const loginMethod = await loginMethodService.createLoginMethod(req.body) + return res.status(StatusCodes.CREATED).json(loginMethod) + } catch (error) { + next(error) + } + } + + public async defaultMethods(req: Request, res: Response, next: NextFunction) { + let queryRunner + try { + queryRunner = getRunningExpressApp().AppDataSource.createQueryRunner() + await queryRunner.connect() + let organizationId + if (getRunningExpressApp().identityManager.getPlatformType() === Platform.CLOUD) { + organizationId = undefined + } else if (getRunningExpressApp().identityManager.getPlatformType() === Platform.ENTERPRISE) { + const organizationService = new OrganizationService() + const organizations = await organizationService.readOrganization(queryRunner) + if (organizations.length > 0) { + organizationId = organizations[0].id + } else { + return res.status(StatusCodes.OK).json({}) + } + } else { + 
return res.status(StatusCodes.OK).json({}) + } + const loginMethodService = new LoginMethodService() + + const providers: string[] = [] + + let loginMethod = await loginMethodService.readLoginMethodByOrganizationId(organizationId, queryRunner) + if (loginMethod) { + for (let method of loginMethod) { + if (method.status === LoginMethodStatus.ENABLE) providers.push(method.name) + } + } + return res.status(StatusCodes.OK).json({ providers: providers }) + } catch (error) { + next(error) + } finally { + if (queryRunner) await queryRunner.release() + } + } + + public async read(req: Request, res: Response, next: NextFunction) { + let queryRunner + try { + queryRunner = getRunningExpressApp().AppDataSource.createQueryRunner() + await queryRunner.connect() + const query = req.query as Partial + const loginMethodService = new LoginMethodService() + + const loginMethodConfig = { + providers: [], + callbacks: [ + { providerName: 'azure', callbackURL: AzureSSO.getCallbackURL() }, + { providerName: 'google', callbackURL: GoogleSSO.getCallbackURL() }, + { providerName: 'auth0', callbackURL: Auth0SSO.getCallbackURL() }, + { providerName: 'github', callbackURL: GithubSSO.getCallbackURL() } + ] + } + let loginMethod: any + if (query.id) { + loginMethod = await loginMethodService.readLoginMethodById(query.id, queryRunner) + if (!loginMethod) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, LoginMethodErrorMessage.LOGIN_METHOD_NOT_FOUND) + loginMethod.config = JSON.parse(await decrypt(loginMethod.config)) + } else { + loginMethod = await loginMethodService.readLoginMethodByOrganizationId(query.organizationId, queryRunner) + + for (let method of loginMethod) { + method.config = JSON.parse(await decrypt(method.config)) + } + loginMethodConfig.providers = loginMethod + } + return res.status(StatusCodes.OK).json(loginMethodConfig) + } catch (error) { + next(error) + } finally { + if (queryRunner) await queryRunner.release() + } + } + public async update(req: Request, res: Response, next: NextFunction) { + try { + const loginMethodService = new LoginMethodService() + const loginMethod = await loginMethodService.createOrUpdateConfig(req.body) + if (loginMethod?.status === 'OK' && loginMethod?.organizationId) { + const appServer = getRunningExpressApp() + let providers: any[] = req.body.providers + providers.map((provider: any) => { + const identityManager = appServer.identityManager + if (provider.config.clientID) { + provider.config.configEnabled = provider.status === LoginMethodStatus.ENABLE + identityManager.initializeSsoProvider(appServer.app, provider.providerName, provider.config) + } + }) + } + return res.status(StatusCodes.OK).json(loginMethod) + } catch (error) { + next(error) + } + } + public async testConfig(req: Request, res: Response, next: NextFunction) { + try { + const providers = req.body.providers + if (req.body.providerName === 'azure') { + const response = await AzureSSO.testSetup(providers[0].config) + return res.json(response) + } else if (req.body.providerName === 'google') { + const response = await GoogleSSO.testSetup(providers[0].config) + return res.json(response) + } else if (req.body.providerName === 'auth0') { + const response = await Auth0SSO.testSetup(providers[0].config) + return res.json(response) + } else if (req.body.providerName === 'github') { + const response = await GithubSSO.testSetup(providers[0].config) + return res.json(response) + } else { + return res.json({ error: 'Provider not supported' }) + } + } catch (error) { + next(error) + } + } +} diff --git 
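Aside (not part of the diff): every controller added in this PR forwards failures with next(error) and raises InternalFlowiseError with an HTTP status code. The Express error middleware that ultimately handles these is not part of this excerpt; the sketch below is hypothetical, and the statusCode property name is an assumption.

    import { NextFunction, Request, Response } from 'express'
    import { StatusCodes } from 'http-status-codes'

    // Hypothetical error shape mirroring how InternalFlowiseError is constructed above (status code + message)
    interface HttpError extends Error {
        statusCode?: number
    }

    export function errorHandler(err: HttpError, _req: Request, res: Response, _next: NextFunction): void {
        const status = err.statusCode ?? StatusCodes.INTERNAL_SERVER_ERROR
        res.status(status).json({ error: err.message })
    }

    // Registered after all routes, e.g. app.use(errorHandler)
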
a/packages/server/src/enterprise/controllers/organization-user.controller.ts b/packages/server/src/enterprise/controllers/organization-user.controller.ts new file mode 100644 index 00000000000..466c6ee1fe5 --- /dev/null +++ b/packages/server/src/enterprise/controllers/organization-user.controller.ts @@ -0,0 +1,146 @@ +import { NextFunction, Request, Response } from 'express' +import { StatusCodes } from 'http-status-codes' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { GeneralErrorMessage } from '../../utils/constants' +import { checkUsageLimit } from '../../utils/quotaUsage' +import { OrganizationUser } from '../database/entities/organization-user.entity' +import { Organization } from '../database/entities/organization.entity' + +type OrganizationUserQuery = Partial> + +import { QueryRunner } from 'typeorm' +import { Platform } from '../../Interface' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { GeneralRole } from '../database/entities/role.entity' +import { User, UserStatus } from '../database/entities/user.entity' +import { WorkspaceUser } from '../database/entities/workspace-user.entity' +import { OrganizationUserService } from '../services/organization-user.service' +import { RoleService } from '../services/role.service' +import { WorkspaceService } from '../services/workspace.service' + +export class OrganizationUserController { + public async create(req: Request, res: Response, next: NextFunction) { + try { + const organizationUserservice = new OrganizationUserService() + const totalOrgUsers = await organizationUserservice.readOrgUsersCountByOrgId(req.body.organizationId) + const subscriptionId = req.user?.activeOrganizationSubscriptionId || '' + await checkUsageLimit('users', subscriptionId, getRunningExpressApp().usageCacheManager, totalOrgUsers + 1) + const newOrganizationUser = await organizationUserservice.createOrganizationUser(req.body) + return res.status(StatusCodes.CREATED).json(newOrganizationUser) + } catch (error) { + next(error) + } + } + + public async read(req: Request, res: Response, next: NextFunction) { + let queryRunner + try { + queryRunner = getRunningExpressApp().AppDataSource.createQueryRunner() + await queryRunner.connect() + const query = req.query as OrganizationUserQuery + const organizationUserservice = new OrganizationUserService() + + let organizationUser: + | { + organization: Organization + organizationUser: OrganizationUser | null + } + | OrganizationUser + | null + | OrganizationUser[] + | (OrganizationUser & { + roleCount: number + })[] + if (query.organizationId && query.userId) { + organizationUser = await organizationUserservice.readOrganizationUserByOrganizationIdUserId( + query.organizationId, + query.userId, + queryRunner + ) + } else if (query.organizationId && query.roleId) { + organizationUser = await organizationUserservice.readOrganizationUserByOrganizationIdRoleId( + query.organizationId, + query.roleId, + queryRunner + ) + } else if (query.organizationId) { + organizationUser = await organizationUserservice.readOrganizationUserByOrganizationId(query.organizationId, queryRunner) + } else if (query.userId) { + organizationUser = await organizationUserservice.readOrganizationUserByUserId(query.userId, queryRunner) + } else { + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, GeneralErrorMessage.UNHANDLED_EDGE_CASE) + } + + return res.status(StatusCodes.OK).json(organizationUser) + } catch (error) { + next(error) + } finally { + if (queryRunner) await 
queryRunner.release() + } + } + + public async update(req: Request, res: Response, next: NextFunction) { + try { + const organizationUserService = new OrganizationUserService() + const organizationUser = await organizationUserService.updateOrganizationUser(req.body) + return res.status(StatusCodes.OK).json(organizationUser) + } catch (error) { + next(error) + } + } + + public async delete(req: Request, res: Response, next: NextFunction) { + let queryRunner: QueryRunner | undefined + try { + queryRunner = getRunningExpressApp().AppDataSource.createQueryRunner() + const currentPlatform = getRunningExpressApp().identityManager.getPlatformType() + await queryRunner.connect() + const query = req.query as Partial + if (!query.organizationId) { + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, 'Organization ID is required') + } + if (!query.userId) { + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, 'User ID is required') + } + + const organizationUserService = new OrganizationUserService() + const workspaceService = new WorkspaceService() + const roleService = new RoleService() + + let organizationUser: OrganizationUser + await queryRunner.startTransaction() + if (currentPlatform === Platform.ENTERPRISE) { + const personalRole = await roleService.readGeneralRoleByName(GeneralRole.PERSONAL_WORKSPACE, queryRunner) + const personalWorkspaces = await queryRunner.manager.findBy(WorkspaceUser, { + userId: query.userId, + roleId: personalRole.id + }) + if (personalWorkspaces.length === 1) + // delete personal workspace + await workspaceService.deleteWorkspaceById(queryRunner, personalWorkspaces[0].workspaceId) + // remove user from other workspaces + organizationUser = await organizationUserService.deleteOrganizationUser(queryRunner, query.organizationId, query.userId) + // soft delete the user because workspaces might have been created by them + const deleteUser = await queryRunner.manager.findOneBy(User, { id: query.userId }) + if (!deleteUser) throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, GeneralErrorMessage.UNHANDLED_EDGE_CASE) + deleteUser.name = UserStatus.DELETED + deleteUser.email = `deleted_${deleteUser.id}_${Date.now()}@deleted.flowise` + deleteUser.status = UserStatus.DELETED + deleteUser.credential = null + deleteUser.tokenExpiry = null + deleteUser.tempToken = null + await queryRunner.manager.save(User, deleteUser) + } else { + organizationUser = await organizationUserService.deleteOrganizationUser(queryRunner, query.organizationId, query.userId) + } + + await queryRunner.commitTransaction() + return res.status(StatusCodes.OK).json(organizationUser) + } catch (error) { + if (queryRunner && queryRunner.isTransactionActive) await queryRunner.rollbackTransaction() + next(error) + } finally { + if (queryRunner && !queryRunner.isReleased) await queryRunner.release() + } + } +} diff --git a/packages/server/src/enterprise/controllers/organization.controller.ts b/packages/server/src/enterprise/controllers/organization.controller.ts new file mode 100644 index 00000000000..b7ca0a6d7a7 --- /dev/null +++ b/packages/server/src/enterprise/controllers/organization.controller.ts @@ -0,0 +1,187 @@ +import { Request, Response, NextFunction } from 'express' +import { StatusCodes } from 'http-status-codes' +import { OrganizationErrorMessage, OrganizationService } from '../services/organization.service' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { Organization } from
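Aside (not part of the diff): the delete handler above follows the usual TypeORM QueryRunner lifecycle: connect, start a transaction, commit on success, roll back on error, release in finally. A stripped-down sketch of that pattern as a reusable helper; the work callback is a placeholder.

    import { DataSource, QueryRunner } from 'typeorm'

    async function withTransaction<T>(dataSource: DataSource, work: (queryRunner: QueryRunner) => Promise<T>): Promise<T> {
        const queryRunner = dataSource.createQueryRunner()
        await queryRunner.connect()
        await queryRunner.startTransaction()
        try {
            const result = await work(queryRunner)
            await queryRunner.commitTransaction()
            return result
        } catch (error) {
            // only roll back if the transaction was actually opened
            if (queryRunner.isTransactionActive) await queryRunner.rollbackTransaction()
            throw error
        } finally {
            if (!queryRunner.isReleased) await queryRunner.release()
        }
    }
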
'../database/entities/organization.entity' +import { GeneralErrorMessage } from '../../utils/constants' +import { OrganizationUserService } from '../services/organization-user.service' +import { getCurrentUsage } from '../../utils/quotaUsage' + +export class OrganizationController { + public async create(req: Request, res: Response, next: NextFunction) { + try { + const organizationUserService = new OrganizationUserService() + const newOrganization = await organizationUserService.createOrganization(req.body) + return res.status(StatusCodes.CREATED).json(newOrganization) + } catch (error) { + next(error) + } + } + + public async read(req: Request, res: Response, next: NextFunction) { + let queryRunner + try { + queryRunner = getRunningExpressApp().AppDataSource.createQueryRunner() + await queryRunner.connect() + const query = req.query as Partial + const organizationService = new OrganizationService() + + let organization: Organization | null + if (query.id) { + organization = await organizationService.readOrganizationById(query.id, queryRunner) + if (!organization) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationErrorMessage.ORGANIZATION_NOT_FOUND) + } else if (query.name) { + organization = await organizationService.readOrganizationByName(query.name, queryRunner) + if (!organization) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationErrorMessage.ORGANIZATION_NOT_FOUND) + } else { + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, GeneralErrorMessage.UNHANDLED_EDGE_CASE) + } + + return res.status(StatusCodes.OK).json(organization) + } catch (error) { + next(error) + } finally { + if (queryRunner) await queryRunner.release() + } + } + + public async update(req: Request, res: Response, next: NextFunction) { + try { + const organizationService = new OrganizationService() + const organization = await organizationService.updateOrganization(req.body) + return res.status(StatusCodes.OK).json(organization) + } catch (error) { + next(error) + } + } + + public async getAdditionalSeatsQuantity(req: Request, res: Response, next: NextFunction) { + try { + const { subscriptionId } = req.query + if (!subscriptionId) { + return res.status(400).json({ error: 'Subscription ID is required' }) + } + const organizationUserservice = new OrganizationUserService() + const totalOrgUsers = await organizationUserservice.readOrgUsersCountByOrgId(req.user?.activeOrganizationId as string) + + const identityManager = getRunningExpressApp().identityManager + const result = await identityManager.getAdditionalSeatsQuantity(subscriptionId as string) + + return res.status(StatusCodes.OK).json({ ...result, totalOrgUsers }) + } catch (error) { + next(error) + } + } + + public async getCustomerWithDefaultSource(req: Request, res: Response, next: NextFunction) { + try { + const { customerId } = req.query + if (!customerId) { + return res.status(400).json({ error: 'Customer ID is required' }) + } + const identityManager = getRunningExpressApp().identityManager + const result = await identityManager.getCustomerWithDefaultSource(customerId as string) + + return res.status(StatusCodes.OK).json(result) + } catch (error) { + next(error) + } + } + + public async getAdditionalSeatsProration(req: Request, res: Response, next: NextFunction) { + try { + const { subscriptionId, quantity } = req.query + if (!subscriptionId) { + return res.status(400).json({ error: 'Customer ID is required' }) + } + if (quantity === undefined) { + return res.status(400).json({ error: 'Quantity is required' }) + } + 
const identityManager = getRunningExpressApp().identityManager + const result = await identityManager.getAdditionalSeatsProration(subscriptionId as string, parseInt(quantity as string)) + + return res.status(StatusCodes.OK).json(result) + } catch (error) { + next(error) + } + } + + public async getPlanProration(req: Request, res: Response, next: NextFunction) { + try { + const { subscriptionId, newPlanId } = req.query + if (!subscriptionId) { + return res.status(400).json({ error: 'Subscription ID is required' }) + } + if (!newPlanId) { + return res.status(400).json({ error: 'New plan ID is required' }) + } + const identityManager = getRunningExpressApp().identityManager + const result = await identityManager.getPlanProration(subscriptionId as string, newPlanId as string) + + return res.status(StatusCodes.OK).json(result) + } catch (error) { + next(error) + } + } + + public async updateAdditionalSeats(req: Request, res: Response, next: NextFunction) { + try { + const { subscriptionId, quantity, prorationDate } = req.body + if (!subscriptionId) { + return res.status(400).json({ error: 'Subscription ID is required' }) + } + if (quantity === undefined) { + return res.status(400).json({ error: 'Quantity is required' }) + } + if (!prorationDate) { + return res.status(400).json({ error: 'Proration date is required' }) + } + const identityManager = getRunningExpressApp().identityManager + const result = await identityManager.updateAdditionalSeats(subscriptionId, quantity, prorationDate) + + return res.status(StatusCodes.OK).json(result) + } catch (error) { + next(error) + } + } + + public async updateSubscriptionPlan(req: Request, res: Response, next: NextFunction) { + try { + const { subscriptionId, newPlanId, prorationDate } = req.body + if (!subscriptionId) { + return res.status(400).json({ error: 'Subscription ID is required' }) + } + if (!newPlanId) { + return res.status(400).json({ error: 'New plan ID is required' }) + } + if (!prorationDate) { + return res.status(400).json({ error: 'Proration date is required' }) + } + const identityManager = getRunningExpressApp().identityManager + const result = await identityManager.updateSubscriptionPlan(req, subscriptionId, newPlanId, prorationDate) + + return res.status(StatusCodes.OK).json(result) + } catch (error) { + next(error) + } + } + + public async getCurrentUsage(req: Request, res: Response, next: NextFunction) { + try { + const orgId = req.user?.activeOrganizationId + const subscriptionId = req.user?.activeOrganizationSubscriptionId + if (!orgId) { + return res.status(400).json({ error: 'Organization ID is required' }) + } + if (!subscriptionId) { + return res.status(400).json({ error: 'Subscription ID is required' }) + } + const usageCacheManager = getRunningExpressApp().usageCacheManager + const result = await getCurrentUsage(orgId, subscriptionId, usageCacheManager) + return res.status(StatusCodes.OK).json(result) + } catch (error) { + next(error) + } + } +} diff --git a/packages/server/src/enterprise/controllers/role.controller.ts b/packages/server/src/enterprise/controllers/role.controller.ts new file mode 100644 index 00000000000..d12b8657ab6 --- /dev/null +++ b/packages/server/src/enterprise/controllers/role.controller.ts @@ -0,0 +1,70 @@ +import { NextFunction, Request, Response } from 'express' +import { StatusCodes } from 'http-status-codes' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { Role } from '../database/entities/role.entity' +import { RoleService } from '../services/role.service' 
+import { InternalFlowiseError } from '../../errors/internalFlowiseError' + +export class RoleController { + public async create(req: Request, res: Response, next: NextFunction) { + try { + const roleService = new RoleService() + const newRole = await roleService.createRole(req.body) + return res.status(StatusCodes.CREATED).json(newRole) + } catch (error) { + next(error) + } + } + + public async read(req: Request, res: Response, next: NextFunction) { + let queryRunner + try { + queryRunner = getRunningExpressApp().AppDataSource.createQueryRunner() + await queryRunner.connect() + const query = req.query as Partial + const roleService = new RoleService() + + let role: Role | Role[] | null | (Role & { userCount: number })[] + if (query.id) { + role = await roleService.readRoleById(query.id, queryRunner) + } else if (query.organizationId) { + role = await roleService.readRoleByOrganizationId(query.organizationId, queryRunner) + } else { + role = await roleService.readRoleByGeneral(queryRunner) + } + + return res.status(StatusCodes.OK).json(role) + } catch (error) { + next(error) + } finally { + if (queryRunner) await queryRunner.release() + } + } + + public async update(req: Request, res: Response, next: NextFunction) { + try { + const roleService = new RoleService() + const role = await roleService.updateRole(req.body) + return res.status(StatusCodes.OK).json(role) + } catch (error) { + next(error) + } + } + + public async delete(req: Request, res: Response, next: NextFunction) { + try { + const query = req.query as Partial + if (!query.id) { + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, 'Role ID is required') + } + if (!query.organizationId) { + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, 'Organization ID is required') + } + const roleService = new RoleService() + const role = await roleService.deleteRole(query.organizationId, query.id) + return res.status(StatusCodes.OK).json(role) + } catch (error) { + next(error) + } + } +} diff --git a/packages/server/src/enterprise/controllers/user.controller.ts b/packages/server/src/enterprise/controllers/user.controller.ts new file mode 100644 index 00000000000..2acc458bb3b --- /dev/null +++ b/packages/server/src/enterprise/controllers/user.controller.ts @@ -0,0 +1,77 @@ +import { NextFunction, Request, Response } from 'express' +import { StatusCodes } from 'http-status-codes' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { GeneralErrorMessage } from '../../utils/constants' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { User } from '../database/entities/user.entity' +import { UserErrorMessage, UserService } from '../services/user.service' + +export class UserController { + public async create(req: Request, res: Response, next: NextFunction) { + try { + const userService = new UserService() + const user = await userService.createUser(req.body) + return res.status(StatusCodes.CREATED).json(user) + } catch (error) { + next(error) + } + } + + public async read(req: Request, res: Response, next: NextFunction) { + let queryRunner + try { + queryRunner = getRunningExpressApp().AppDataSource.createQueryRunner() + await queryRunner.connect() + const query = req.query as Partial + const userService = new UserService() + + let user: User | null + if (query.id) { + user = await userService.readUserById(query.id, queryRunner) + if (!user) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + } else if (query.email) { + user = await 
userService.readUserByEmail(query.email, queryRunner) + if (!user) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + } else { + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, GeneralErrorMessage.UNHANDLED_EDGE_CASE) + } + + if (user) { + delete user.credential + delete user.tempToken + delete user.tokenExpiry + } + return res.status(StatusCodes.OK).json(user) + } catch (error) { + next(error) + } finally { + if (queryRunner) await queryRunner.release() + } + } + + public async update(req: Request, res: Response, next: NextFunction) { + try { + const userService = new UserService() + const currentUser = req.user + if (!currentUser) { + throw new InternalFlowiseError(StatusCodes.UNAUTHORIZED, UserErrorMessage.USER_NOT_FOUND) + } + const { id } = req.body + if (currentUser.id !== id) { + throw new InternalFlowiseError(StatusCodes.FORBIDDEN, UserErrorMessage.USER_NOT_FOUND) + } + const user = await userService.updateUser(req.body) + return res.status(StatusCodes.OK).json(user) + } catch (error) { + next(error) + } + } + + public async test(req: Request, res: Response, next: NextFunction) { + try { + return res.status(StatusCodes.OK).json({ message: 'Hello World' }) + } catch (error) { + next(error) + } + } +} diff --git a/packages/server/src/enterprise/controllers/workspace-user.controller.ts b/packages/server/src/enterprise/controllers/workspace-user.controller.ts new file mode 100644 index 00000000000..f7af6efb169 --- /dev/null +++ b/packages/server/src/enterprise/controllers/workspace-user.controller.ts @@ -0,0 +1,80 @@ +import { Request, Response, NextFunction } from 'express' +import { StatusCodes } from 'http-status-codes' +import { WorkspaceUserService } from '../services/workspace-user.service' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { WorkspaceUser } from '../database/entities/workspace-user.entity' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { GeneralErrorMessage } from '../../utils/constants' + +export class WorkspaceUserController { + public async create(req: Request, res: Response, next: NextFunction) { + try { + const workspaceUserService = new WorkspaceUserService() + const newWorkspaceUser = await workspaceUserService.createWorkspaceUser(req.body) + return res.status(StatusCodes.CREATED).json(newWorkspaceUser) + } catch (error) { + next(error) + } + } + + public async read(req: Request, res: Response, next: NextFunction) { + let queryRunner + try { + queryRunner = getRunningExpressApp().AppDataSource.createQueryRunner() + await queryRunner.connect() + const query = req.query as Partial + const workspaceUserService = new WorkspaceUserService() + + let workspaceUser: any + if (query.workspaceId && query.userId) { + workspaceUser = await workspaceUserService.readWorkspaceUserByWorkspaceIdUserId( + query.workspaceId, + query.userId, + queryRunner + ) + } else if (query.workspaceId) { + workspaceUser = await workspaceUserService.readWorkspaceUserByWorkspaceId(query.workspaceId, queryRunner) + } else if (query.organizationId && query.userId) { + workspaceUser = await workspaceUserService.readWorkspaceUserByOrganizationIdUserId( + query.organizationId, + query.userId, + queryRunner + ) + } else if (query.userId) { + workspaceUser = await workspaceUserService.readWorkspaceUserByUserId(query.userId, queryRunner) + } else if (query.roleId) { + workspaceUser = await workspaceUserService.readWorkspaceUserByRoleId(query.roleId, queryRunner) + } else { + throw new 
InternalFlowiseError(StatusCodes.BAD_REQUEST, GeneralErrorMessage.UNHANDLED_EDGE_CASE) + } + + return res.status(StatusCodes.OK).json(workspaceUser) + } catch (error) { + next(error) + } finally { + if (queryRunner) await queryRunner.release() + } + } + + public async update(req: Request, res: Response, next: NextFunction) { + try { + const workspaceUserService = new WorkspaceUserService() + const workspaceUser = await workspaceUserService.updateWorkspaceUser(req.body) + return res.status(StatusCodes.OK).json(workspaceUser) + } catch (error) { + next(error) + } + } + + public async delete(req: Request, res: Response, next: NextFunction) { + try { + const query = req.query as Partial + + const workspaceUserService = new WorkspaceUserService() + const workspaceUser = await workspaceUserService.deleteWorkspaceUser(query.workspaceId, query.userId) + return res.status(StatusCodes.OK).json(workspaceUser) + } catch (error) { + next(error) + } + } +} diff --git a/packages/server/src/enterprise/controllers/workspace.controller.ts b/packages/server/src/enterprise/controllers/workspace.controller.ts new file mode 100644 index 00000000000..d20edcf9449 --- /dev/null +++ b/packages/server/src/enterprise/controllers/workspace.controller.ts @@ -0,0 +1,240 @@ +import { NextFunction, Request, Response } from 'express' +import { StatusCodes } from 'http-status-codes' +import { QueryRunner } from 'typeorm' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { GeneralErrorMessage } from '../../utils/constants' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { OrganizationUserStatus } from '../database/entities/organization-user.entity' +import { GeneralRole } from '../database/entities/role.entity' +import { WorkspaceUserStatus } from '../database/entities/workspace-user.entity' +import { Workspace } from '../database/entities/workspace.entity' +import { IAssignedWorkspace, LoggedInUser } from '../Interface.Enterprise' +import { OrganizationUserErrorMessage, OrganizationUserService } from '../services/organization-user.service' +import { OrganizationErrorMessage, OrganizationService } from '../services/organization.service' +import { RoleErrorMessage, RoleService } from '../services/role.service' +import { UserErrorMessage, UserService } from '../services/user.service' +import { WorkspaceUserErrorMessage, WorkspaceUserService } from '../services/workspace-user.service' +import { WorkspaceErrorMessage, WorkspaceService } from '../services/workspace.service' + +export class WorkspaceController { + public async create(req: Request, res: Response, next: NextFunction) { + try { + const workspaceUserService = new WorkspaceUserService() + const newWorkspace = await workspaceUserService.createWorkspace(req.body) + return res.status(StatusCodes.CREATED).json(newWorkspace) + } catch (error) { + next(error) + } + } + + public async read(req: Request, res: Response, next: NextFunction) { + let queryRunner + try { + queryRunner = getRunningExpressApp().AppDataSource.createQueryRunner() + await queryRunner.connect() + const query = req.query as Partial + const workspaceService = new WorkspaceService() + + let workspace: + | Workspace + | null + | (Workspace & { + userCount: number + })[] + if (query.id) { + workspace = await workspaceService.readWorkspaceById(query.id, queryRunner) + } else if (query.organizationId) { + workspace = await workspaceService.readWorkspaceByOrganizationId(query.organizationId, queryRunner) + } else { + throw new 
InternalFlowiseError(StatusCodes.BAD_REQUEST, GeneralErrorMessage.UNHANDLED_EDGE_CASE) + } + + return res.status(StatusCodes.OK).json(workspace) + } catch (error) { + next(error) + } finally { + if (queryRunner) await queryRunner.release() + } + } + + public async switchWorkspace(req: Request, res: Response, next: NextFunction) { + if (!req.user) { + return next(new InternalFlowiseError(StatusCodes.UNAUTHORIZED, `Unauthorized: User not found`)) + } + let queryRunner + try { + queryRunner = getRunningExpressApp().AppDataSource.createQueryRunner() + await queryRunner.connect() + const query = req.query as Partial + await queryRunner.startTransaction() + + const workspaceService = new WorkspaceService() + const workspace = await workspaceService.readWorkspaceById(query.id, queryRunner) + if (!workspace) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, WorkspaceErrorMessage.WORKSPACE_NOT_FOUND) + + const userService = new UserService() + const user = await userService.readUserById(req.user.id, queryRunner) + if (!user) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + + const workspaceUserService = new WorkspaceUserService() + const { workspaceUser } = await workspaceUserService.readWorkspaceUserByWorkspaceIdUserId(query.id, req.user.id, queryRunner) + if (!workspaceUser) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, WorkspaceUserErrorMessage.WORKSPACE_USER_NOT_FOUND) + workspaceUser.lastLogin = new Date().toISOString() + workspaceUser.status = WorkspaceUserStatus.ACTIVE + workspaceUser.updatedBy = user.id + await workspaceUserService.saveWorkspaceUser(workspaceUser, queryRunner) + + const organizationUserService = new OrganizationUserService() + const { organizationUser } = await organizationUserService.readOrganizationUserByWorkspaceIdUserId( + workspaceUser.workspaceId, + workspaceUser.userId, + queryRunner + ) + if (!organizationUser) + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationUserErrorMessage.ORGANIZATION_USER_NOT_FOUND) + organizationUser.status = OrganizationUserStatus.ACTIVE + organizationUser.updatedBy = user.id + await organizationUserService.saveOrganizationUser(organizationUser, queryRunner) + + const roleService = new RoleService() + const ownerRole = await roleService.readGeneralRoleByName(GeneralRole.OWNER, queryRunner) + const role = await roleService.readRoleById(workspaceUser.roleId, queryRunner) + if (!role) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, RoleErrorMessage.ROLE_NOT_FOUND) + + const orgService = new OrganizationService() + const org = await orgService.readOrganizationById(organizationUser.organizationId, queryRunner) + if (!org) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationErrorMessage.ORGANIZATION_NOT_FOUND) + const subscriptionId = org.subscriptionId as string + const customerId = org.customerId as string + const features = await getRunningExpressApp().identityManager.getFeaturesByPlan(subscriptionId) + const productId = await getRunningExpressApp().identityManager.getProductIdFromSubscription(subscriptionId) + + const workspaceUsers = await workspaceUserService.readWorkspaceUserByUserId(req.user.id, queryRunner) + const assignedWorkspaces: IAssignedWorkspace[] = workspaceUsers.map((workspaceUser) => { + return { + id: workspaceUser.workspace.id, + name: workspaceUser.workspace.name, + role: workspaceUser.role?.name, + organizationId: workspaceUser.workspace.organizationId + } as IAssignedWorkspace + }) + + const loggedInUser: LoggedInUser & { role: 
string; isSSO: boolean } = { + ...req.user, + activeOrganizationId: org.id, + activeOrganizationSubscriptionId: subscriptionId, + activeOrganizationCustomerId: customerId, + activeOrganizationProductId: productId, + isOrganizationAdmin: workspaceUser.roleId === ownerRole.id, + activeWorkspaceId: workspace.id, + activeWorkspace: workspace.name, + assignedWorkspaces, + isApiKeyValidated: true, + isSSO: req.user.ssoProvider ? true : false, + permissions: [...JSON.parse(role.permissions)], + features, + role: role.name, + roleId: role.id + } + + // update the user object on the current request + req.user = { + ...req.user, + ...loggedInUser + } + + // update the passport session so the new workspace context persists across requests + // @ts-ignore + req.session.passport.user = { + ...req.user, + ...loggedInUser + } + + req.session.save((err) => { + if (err) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, GeneralErrorMessage.UNHANDLED_EDGE_CASE) + }) + + await queryRunner.commitTransaction() + return res.status(StatusCodes.OK).json(loggedInUser) + } catch (error) { + if (queryRunner && queryRunner.isTransactionActive) { + await queryRunner.rollbackTransaction() + } + next(error) + } finally { + if (queryRunner && !queryRunner.isReleased) { + await queryRunner.release() + } + } + } + + public async update(req: Request, res: Response, next: NextFunction) { + try { + const workspaceService = new WorkspaceService() + const workspace = await workspaceService.updateWorkspace(req.body) + return res.status(StatusCodes.OK).json(workspace) + } catch (error) { + next(error) + } + } + + public async delete(req: Request, res: Response, next: NextFunction) { + let queryRunner: QueryRunner | undefined + try { + queryRunner = getRunningExpressApp().AppDataSource.createQueryRunner() + await queryRunner.connect() + const workspaceId = req.params.id + if (!workspaceId) { + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, WorkspaceErrorMessage.INVALID_WORKSPACE_ID) + } + const workspaceService = new WorkspaceService() + await queryRunner.startTransaction() + + const workspace = await workspaceService.deleteWorkspaceById(queryRunner, workspaceId) + + await queryRunner.commitTransaction() + return res.status(StatusCodes.OK).json(workspace) + } catch (error) { + if (queryRunner && queryRunner.isTransactionActive) await queryRunner.rollbackTransaction() + next(error) + } finally { + if (queryRunner && !queryRunner.isReleased) await queryRunner.release() + } + } + + public async getSharedWorkspacesForItem(req: Request, res: Response, next: NextFunction) { + try { + if (typeof req.params === 'undefined' || !req.params.id) { + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, WorkspaceErrorMessage.INVALID_WORKSPACE_ID) + } + const workspaceService = new WorkspaceService() + return res.json(await workspaceService.getSharedWorkspacesForItem(req.params.id)) + } catch (error) { + next(error) + } + } + + public async setSharedWorkspacesForItem(req: Request, res: Response, next: NextFunction) { + try { + if (!req.user) { + throw new InternalFlowiseError(StatusCodes.UNAUTHORIZED, `Unauthorized: User not found`) + } + if (typeof req.params === 'undefined' || !req.params.id) { + throw new InternalFlowiseError( + StatusCodes.UNAUTHORIZED, + `Error: workspaceController.setSharedWorkspacesForItem - id not provided!` + ) + } + if (!req.body) { + throw new InternalFlowiseError( + StatusCodes.PRECONDITION_FAILED, + `Error: workspaceController.setSharedWorkspacesForItem - body not provided!` + ) + } + const workspaceService = new WorkspaceService() + return res.json(await 
workspaceService.setSharedWorkspacesForItem(req.params.id, req.body)) + } catch (error) { + next(error) + } + } +} diff --git a/packages/server/src/enterprise/database/entities/EnterpriseEntities.ts b/packages/server/src/enterprise/database/entities/EnterpriseEntities.ts new file mode 100644 index 00000000000..da111d98ab6 --- /dev/null +++ b/packages/server/src/enterprise/database/entities/EnterpriseEntities.ts @@ -0,0 +1,62 @@ +import { Column, Entity, PrimaryGeneratedColumn, UpdateDateColumn } from 'typeorm' +import { ILoginActivity, IWorkspaceShared, IWorkspaceUser } from '../../Interface.Enterprise' + +@Entity('workspace_users') +export class WorkspaceUsers implements IWorkspaceUser { + @PrimaryGeneratedColumn('uuid') + id: string + + @Column({ type: 'text' }) + workspaceId: string + + @Column({ type: 'text' }) + userId: string + + @Column({ type: 'text' }) + role: string +} + +@Entity('workspace_shared') +export class WorkspaceShared implements IWorkspaceShared { + @PrimaryGeneratedColumn('uuid') + id: string + + @Column({ type: 'text' }) + workspaceId: string + + @Column({ type: 'text' }) + sharedItemId: string + + @Column({ type: 'text', name: 'itemType' }) + itemType: string + + @Column({ type: 'timestamp' }) + @UpdateDateColumn() + createdDate: Date + + @Column({ type: 'timestamp' }) + @UpdateDateColumn() + updatedDate: Date +} + +@Entity('login_activity') +export class LoginActivity implements ILoginActivity { + @PrimaryGeneratedColumn('uuid') + id: string + + @Column({ type: 'text' }) + username: string + + @Column({ name: 'activity_code' }) + activityCode: number + + @Column({ name: 'login_mode' }) + loginMode: string + + @Column({ type: 'text' }) + message: string + + @Column({ type: 'timestamp' }) + @UpdateDateColumn() + attemptedDateTime: Date +} diff --git a/packages/server/src/enterprise/database/entities/login-method.entity.ts b/packages/server/src/enterprise/database/entities/login-method.entity.ts new file mode 100644 index 00000000000..b30a991b413 --- /dev/null +++ b/packages/server/src/enterprise/database/entities/login-method.entity.ts @@ -0,0 +1,47 @@ +import { Column, CreateDateColumn, Entity, JoinColumn, ManyToOne, PrimaryGeneratedColumn, UpdateDateColumn } from 'typeorm' +import { User } from './user.entity' +import { Organization } from './organization.entity' + +export enum LoginMethodStatus { + ENABLE = 'enable', + DISABLE = 'disable' +} + +@Entity({ name: 'login_method' }) +export class LoginMethod { + @PrimaryGeneratedColumn('uuid') + id: string + + @Column({ nullable: true }) + organizationId?: string + @ManyToOne(() => Organization, (organization) => organization.id) + @JoinColumn({ name: 'organizationId' }) + organization?: Organization + + @Column({ type: 'varchar', length: 100 }) + name: string + + @Column({ type: 'text' }) + config: string + + @Column({ type: 'varchar', length: 20, default: LoginMethodStatus.ENABLE }) + status?: string + + @CreateDateColumn() + createdDate?: Date + + @UpdateDateColumn() + updatedDate?: Date + + @Column({ nullable: true }) + createdBy?: string + @ManyToOne(() => User, (user) => user.createdByLoginMethod) + @JoinColumn({ name: 'createdBy' }) + createdByUser?: User + + @Column({ nullable: true }) + updatedBy?: string + @ManyToOne(() => User, (user) => user.updatedByLoginMethod) + @JoinColumn({ name: 'updatedBy' }) + updatedByUser?: User +} diff --git a/packages/server/src/enterprise/database/entities/organization-user.entity.ts b/packages/server/src/enterprise/database/entities/organization-user.entity.ts new file mode 
100644 index 00000000000..540ba5c28e0 --- /dev/null +++ b/packages/server/src/enterprise/database/entities/organization-user.entity.ts @@ -0,0 +1,52 @@ +import { Column, CreateDateColumn, Entity, JoinColumn, ManyToOne, PrimaryColumn, UpdateDateColumn } from 'typeorm' +import { Organization } from './organization.entity' +import { Role } from './role.entity' +import { User } from './user.entity' + +export enum OrganizationUserStatus { + ACTIVE = 'active', + DISABLE = 'disable', + INVITED = 'invited' +} + +@Entity({ name: 'organization_user' }) +export class OrganizationUser { + @PrimaryColumn() + organizationId: string + @ManyToOne(() => Organization, (organization) => organization.id) + @JoinColumn({ name: 'organizationId' }) + organization: Organization + + @PrimaryColumn() + userId: string + @ManyToOne(() => User, (user) => user.id) + @JoinColumn({ name: 'userId' }) + user: User + + @Column({ type: 'uuid', nullable: false }) + roleId: string + @ManyToOne(() => Role, (role) => role.id) + @JoinColumn({ name: 'roleId' }) + role?: Role + + @Column({ type: 'varchar', length: 20, default: OrganizationUserStatus.ACTIVE }) + status?: string + + @CreateDateColumn() + createdDate?: Date + + @UpdateDateColumn() + updatedDate?: Date + + @Column({ nullable: false }) + createdBy?: string + @ManyToOne(() => User, (user) => user.createdOrganizationUser) + @JoinColumn({ name: 'createdBy' }) + createdByUser?: User + + @Column({ nullable: false }) + updatedBy?: string + @ManyToOne(() => User, (user) => user.updatedOrganizationUser) + @JoinColumn({ name: 'updatedBy' }) + updatedByUser?: User +} diff --git a/packages/server/src/enterprise/database/entities/organization.entity.ts b/packages/server/src/enterprise/database/entities/organization.entity.ts new file mode 100644 index 00000000000..1f6ad47ca57 --- /dev/null +++ b/packages/server/src/enterprise/database/entities/organization.entity.ts @@ -0,0 +1,39 @@ +import { Column, CreateDateColumn, Entity, JoinColumn, ManyToOne, PrimaryGeneratedColumn, UpdateDateColumn } from 'typeorm' +import { User } from './user.entity' + +export enum OrganizationName { + DEFAULT_ORGANIZATION = 'Default Organization' +} + +@Entity() +export class Organization { + @PrimaryGeneratedColumn('uuid') + id: string + + @Column({ type: 'varchar', length: 100, default: OrganizationName.DEFAULT_ORGANIZATION }) + name: string + + @Column({ type: 'varchar', length: 100, nullable: true }) + customerId?: string + + @Column({ type: 'varchar', length: 100, nullable: true }) + subscriptionId?: string + + @CreateDateColumn() + createdDate?: Date + + @UpdateDateColumn() + updatedDate?: Date + + @Column({ nullable: false }) + createdBy?: string + @ManyToOne(() => User, (user) => user.createdOrganizations) + @JoinColumn({ name: 'createdBy' }) + createdByUser?: User + + @Column({ nullable: false }) + updatedBy?: string + @ManyToOne(() => User, (user) => user.updatedOrganizations) + @JoinColumn({ name: 'updatedBy' }) + updatedByUser?: User +} diff --git a/packages/server/src/enterprise/database/entities/role.entity.ts b/packages/server/src/enterprise/database/entities/role.entity.ts new file mode 100644 index 00000000000..86364cb9e7e --- /dev/null +++ b/packages/server/src/enterprise/database/entities/role.entity.ts @@ -0,0 +1,48 @@ +import { Column, CreateDateColumn, Entity, JoinColumn, ManyToOne, PrimaryGeneratedColumn, UpdateDateColumn } from 'typeorm' +import { Organization } from './organization.entity' +import { User } from './user.entity' + +export enum GeneralRole { + OWNER = 'owner', + 
MEMBER = 'member', + PERSONAL_WORKSPACE = 'personal workspace' +} + +@Entity() +export class Role { + @PrimaryGeneratedColumn('uuid') + id: string + + @Column({ nullable: true }) + organizationId?: string + @ManyToOne(() => Organization, (organization) => organization.id) + @JoinColumn({ name: 'organizationId' }) + organization?: Organization + + @Column({ type: 'varchar', length: 100 }) + name: string + + @Column({ type: 'text', nullable: true }) + description?: string + + @Column({ type: 'text' }) + permissions: string + + @CreateDateColumn() + createdDate?: Date + + @UpdateDateColumn() + updatedDate?: Date + + @Column({ nullable: true }) + createdBy?: string + @ManyToOne(() => User, (user) => user.createdRoles) + @JoinColumn({ name: 'createdBy' }) + createdByUser?: User + + @Column({ nullable: true }) + updatedBy?: string + @ManyToOne(() => User, (user) => user.updatedRoles) + @JoinColumn({ name: 'updatedBy' }) + updatedByUser?: User +} diff --git a/packages/server/src/enterprise/database/entities/user.entity.ts b/packages/server/src/enterprise/database/entities/user.entity.ts new file mode 100644 index 00000000000..3bb455aefda --- /dev/null +++ b/packages/server/src/enterprise/database/entities/user.entity.ts @@ -0,0 +1,92 @@ +import { Column, CreateDateColumn, Entity, JoinColumn, ManyToOne, OneToMany, PrimaryGeneratedColumn, UpdateDateColumn } from 'typeorm' +import { LoginMethod } from './login-method.entity' +import { OrganizationUser } from './organization-user.entity' +import { Organization } from './organization.entity' +import { Role } from './role.entity' +import { WorkspaceUser } from './workspace-user.entity' +import { Workspace } from './workspace.entity' + +export enum UserStatus { + ACTIVE = 'active', + INVITED = 'invited', + UNVERIFIED = 'unverified', + DELETED = 'deleted' +} + +@Entity() +export class User { + @PrimaryGeneratedColumn('uuid') + id: string + + @Column({ type: 'varchar', length: 100 }) + name: string + + @Column({ type: 'varchar', length: 255, unique: true }) + email: string + + @Column({ type: 'text', nullable: true }) + credential?: string | null + + @Column({ type: 'text', nullable: true, unique: true }) + tempToken?: string | null + + @CreateDateColumn({ nullable: true }) + tokenExpiry?: Date | null + + @Column({ type: 'varchar', length: 20, default: UserStatus.UNVERIFIED }) + status: string + + @CreateDateColumn() + createdDate?: Date + + @UpdateDateColumn() + updatedDate?: Date + + @Column({ nullable: false }) + createdBy: string + @ManyToOne(() => User, (user) => user.id, {}) + @JoinColumn({ name: 'createdBy' }) + createdByUser?: User + + @Column({ nullable: false }) + updatedBy: string + @ManyToOne(() => User, (user) => user.id, {}) + @JoinColumn({ name: 'updatedBy' }) + updatedByUser?: User + + @OneToMany(() => Organization, (organization) => organization.createdByUser) + createdOrganizations?: Organization[] + + @OneToMany(() => Organization, (organization) => organization.updatedByUser) + updatedOrganizations?: Organization[] + + @OneToMany(() => Role, (role) => role.createdByUser) + createdRoles?: Role[] + + @OneToMany(() => Role, (role) => role.updatedByUser) + updatedRoles?: Role[] + + @OneToMany(() => OrganizationUser, (organizationUser) => organizationUser.createdByUser) + createdOrganizationUser?: OrganizationUser[] + + @OneToMany(() => OrganizationUser, (organizationUser) => organizationUser.updatedByUser) + updatedOrganizationUser?: OrganizationUser[] + + @OneToMany(() => Workspace, (workspace) => workspace.createdByUser) + 
createdWorkspace?: Workspace[] + + @OneToMany(() => Workspace, (workspace) => workspace.updatedByUser) + updatedWorkspace?: Workspace[] + + @OneToMany(() => WorkspaceUser, (workspaceUser) => workspaceUser.createdByUser) + createdWorkspaceUser?: WorkspaceUser[] + + @OneToMany(() => WorkspaceUser, (workspaceUser) => workspaceUser.updatedByUser) + updatedByWorkspaceUser?: WorkspaceUser[] + + @OneToMany(() => LoginMethod, (loginMethod) => loginMethod.createdByUser) + createdByLoginMethod?: LoginMethod[] + + @OneToMany(() => LoginMethod, (loginMethod) => loginMethod.updatedByUser) + updatedByLoginMethod?: LoginMethod[] +} diff --git a/packages/server/src/enterprise/database/entities/workspace-user.entity.ts b/packages/server/src/enterprise/database/entities/workspace-user.entity.ts new file mode 100644 index 00000000000..0afb71a9231 --- /dev/null +++ b/packages/server/src/enterprise/database/entities/workspace-user.entity.ts @@ -0,0 +1,55 @@ +import { Column, CreateDateColumn, Entity, JoinColumn, ManyToOne, PrimaryColumn, UpdateDateColumn } from 'typeorm' +import { User } from './user.entity' +import { Role } from './role.entity' +import { Workspace } from './workspace.entity' + +export enum WorkspaceUserStatus { + ACTIVE = 'active', + DISABLE = 'disable', + INVITED = 'invited' +} + +@Entity({ name: 'workspace_user' }) +export class WorkspaceUser { + @PrimaryColumn() + workspaceId: string + @ManyToOne(() => Workspace, (workspace) => workspace.id) + @JoinColumn({ name: 'workspaceId' }) + workspace: Workspace + + @PrimaryColumn() + userId: string + @ManyToOne(() => User, (user) => user.id) + @JoinColumn({ name: 'userId' }) + user: User + + @Column({ type: 'uuid', nullable: false }) + roleId: string + @ManyToOne(() => Role, (role) => role.id) + @JoinColumn({ name: 'roleId' }) + role?: Role + + @Column({ type: 'varchar', length: 20, default: WorkspaceUserStatus.INVITED }) + status?: string + + @CreateDateColumn() + lastLogin?: string + + @CreateDateColumn() + createdDate?: Date + + @UpdateDateColumn() + updatedDate?: Date + + @Column({ nullable: false }) + createdBy?: string + @ManyToOne(() => User, (user) => user.createdWorkspaceUser) + @JoinColumn({ name: 'createdBy' }) + createdByUser?: User + + @Column({ nullable: false }) + updatedBy?: string + @ManyToOne(() => User, (user) => user.updatedByWorkspaceUser) + @JoinColumn({ name: 'updatedBy' }) + updatedByUser?: User +} diff --git a/packages/server/src/enterprise/database/entities/workspace.entity.ts b/packages/server/src/enterprise/database/entities/workspace.entity.ts new file mode 100644 index 00000000000..228d8efc48f --- /dev/null +++ b/packages/server/src/enterprise/database/entities/workspace.entity.ts @@ -0,0 +1,44 @@ +import { Column, CreateDateColumn, Entity, JoinColumn, ManyToOne, PrimaryGeneratedColumn, UpdateDateColumn } from 'typeorm' +import { Organization } from './organization.entity' +import { User } from './user.entity' + +export enum WorkspaceName { + DEFAULT_WORKSPACE = 'Default Workspace', + DEFAULT_PERSONAL_WORKSPACE = 'Personal Workspace' +} + +@Entity() +export class Workspace { + @PrimaryGeneratedColumn('uuid') + id: string + + @Column({ type: 'varchar', length: 100, default: WorkspaceName.DEFAULT_PERSONAL_WORKSPACE }) + name: string + + @Column({ type: 'text', nullable: true }) + description?: string + + @Column({ nullable: false }) + organizationId?: string + @ManyToOne(() => Organization, (organization) => organization.id) + @JoinColumn({ name: 'organizationId' }) + organization?: Organization + + @CreateDateColumn() + 
createdDate?: Date + + @UpdateDateColumn() + updatedDate?: Date + + @Column({ nullable: false }) + createdBy?: string + @ManyToOne(() => User, (user) => user.createdWorkspace) + @JoinColumn({ name: 'createdBy' }) + createdByUser?: User + + @Column({ nullable: false }) + updatedBy?: string + @ManyToOne(() => User, (user) => user.updatedWorkspace) + @JoinColumn({ name: 'updatedBy' }) + updatedByUser?: User +} diff --git a/packages/server/src/enterprise/database/migrations/mariadb/1720230151482-AddAuthTables.ts b/packages/server/src/enterprise/database/migrations/mariadb/1720230151482-AddAuthTables.ts new file mode 100644 index 00000000000..8b3f2134d20 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/mariadb/1720230151482-AddAuthTables.ts @@ -0,0 +1,46 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddAuthTables1720230151482 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS \`user\` ( + \`id\` varchar(36) NOT NULL, + \`name\` varchar(255), + \`role\` varchar(20) NOT NULL, + \`email\` varchar(100) NOT NULL, + \`status\` varchar(20) NOT NULL, + \`credential\` text, + \`tempToken\` text, + \`tokenExpiry\` datetime(6), + \`activeWorkspaceId\` varchar(100), + \`lastLogin\` datetime(6), + PRIMARY KEY (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_520_ci;` + ) + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS \`roles\` ( + \`id\` varchar(36) NOT NULL, + \`name\` varchar(255), + \`description\` text, + \`permissions\` text, + PRIMARY KEY (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_520_ci;` + ) + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS \`login_activity\` ( + \`id\` varchar(36) NOT NULL, + \`username\` varchar(255), + \`message\` varchar(255) NOT NULL, + \`activity_code\` INT NOT NULL, + \`attemptedDateTime\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6), + PRIMARY KEY (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_520_ci;` + ) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE user`) + await queryRunner.query(`DROP TABLE roles`) + await queryRunner.query(`DROP TABLE login_activity`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/mariadb/1725437498242-AddWorkspace.ts b/packages/server/src/enterprise/database/migrations/mariadb/1725437498242-AddWorkspace.ts new file mode 100644 index 00000000000..0acb7733b4e --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/mariadb/1725437498242-AddWorkspace.ts @@ -0,0 +1,52 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { ensureColumnExists } from './mariaDbCustomFunctions' + +export class AddWorkspace1725437498242 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS \`workspace\` ( + \`id\` varchar(36) NOT NULL, + \`name\` varchar(255) NOT NULL, + \`description\` text DEFAULT NULL, + \`createdDate\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6), + \`updatedDate\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6), + PRIMARY KEY (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_520_ci;` + ) + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS \`workspace_users\` ( + \`id\` varchar(36) NOT NULL, + \`workspaceId\` 
varchar(36) NOT NULL, + \`userId\` varchar(36) NOT NULL, + \`role\` varchar(255) DEFAULT NULL, + PRIMARY KEY (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_520_ci;` + ) + + await ensureColumnExists(queryRunner, 'chat_flow', 'workspaceId', 'varchar(36)') + await ensureColumnExists(queryRunner, 'tool', 'workspaceId', 'varchar(36)') + await ensureColumnExists(queryRunner, 'assistant', 'workspaceId', 'varchar(36)') + await ensureColumnExists(queryRunner, 'credential', 'workspaceId', 'varchar(36)') + await ensureColumnExists(queryRunner, 'document_store', 'workspaceId', 'varchar(36)') + await ensureColumnExists(queryRunner, 'evaluation', 'workspaceId', 'varchar(36)') + await ensureColumnExists(queryRunner, 'evaluator', 'workspaceId', 'varchar(36)') + await ensureColumnExists(queryRunner, 'dataset', 'workspaceId', 'varchar(36)') + await ensureColumnExists(queryRunner, 'apikey', 'workspaceId', 'varchar(36)') + await ensureColumnExists(queryRunner, 'variable', 'workspaceId', 'varchar(36)') + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE workspace`) + await queryRunner.query(`DROP TABLE workspace_users`) + + await queryRunner.query(`ALTER TABLE \`chat_flow\` DROP COLUMN \`workspaceId\`;`) + await queryRunner.query(`ALTER TABLE \`tool\` DROP COLUMN \`workspaceId\`;`) + await queryRunner.query(`ALTER TABLE \`assistant\` DROP COLUMN \`workspaceId\`;`) + await queryRunner.query(`ALTER TABLE \`credential\` DROP COLUMN \`workspaceId\`;`) + await queryRunner.query(`ALTER TABLE \`document_store\` DROP COLUMN \`workspaceId\`;`) + await queryRunner.query(`ALTER TABLE \`evaluation\` DROP COLUMN \`workspaceId\`;`) + await queryRunner.query(`ALTER TABLE \`dataset\` DROP COLUMN \`workspaceId\`;`) + await queryRunner.query(`ALTER TABLE \`apikey\` DROP COLUMN \`workspaceId\`;`) + await queryRunner.query(`ALTER TABLE \`variable\` DROP COLUMN \`workspaceId\`;`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/mariadb/1726654922034-AddWorkspaceShared.ts b/packages/server/src/enterprise/database/migrations/mariadb/1726654922034-AddWorkspaceShared.ts new file mode 100644 index 00000000000..16375d79bb8 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/mariadb/1726654922034-AddWorkspaceShared.ts @@ -0,0 +1,21 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddWorkspaceShared1726654922034 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS \`workspace_shared\` ( + \`id\` varchar(36) NOT NULL, + \`workspaceId\` varchar(50) NOT NULL, + \`sharedItemId\` varchar(50) NOT NULL, + \`itemType\` varchar(50) NOT NULL, + \`createdDate\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6), + \`updatedDate\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6), + PRIMARY KEY (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_520_ci;` + ) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE workspace_shared`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/mariadb/1726655750383-AddWorkspaceIdToCustomTemplate.ts b/packages/server/src/enterprise/database/migrations/mariadb/1726655750383-AddWorkspaceIdToCustomTemplate.ts new file mode 100644 index 00000000000..c6b6c2ab2bd --- /dev/null +++ 
b/packages/server/src/enterprise/database/migrations/mariadb/1726655750383-AddWorkspaceIdToCustomTemplate.ts @@ -0,0 +1,11 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddWorkspaceIdToCustomTemplate1726655750383 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE \`custom_template\` ADD COLUMN \`workspaceId\` varchar(36);`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE \`custom_template\` DROP COLUMN \`workspaceId\`;`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/mariadb/1727798417345-AddOrganization.ts b/packages/server/src/enterprise/database/migrations/mariadb/1727798417345-AddOrganization.ts new file mode 100644 index 00000000000..7397ceb40ef --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/mariadb/1727798417345-AddOrganization.ts @@ -0,0 +1,26 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddOrganization1727798417345 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS \`organization\` ( + \`id\` varchar(36) NOT NULL, + \`name\` varchar(255) NOT NULL, + \`adminUserId\` varchar(255) NULL, + \`defaultWsId\` varchar(255) NULL, + \`organization_type\` varchar(255) NULL, + \`createdDate\` timestamp(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6), + \`updatedDate\` timestamp(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6), + PRIMARY KEY (\`id\`), + KEY \`idx_organization_id\` (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_520_ci;` + ) + await queryRunner.query(`ALTER TABLE \`workspace\` ADD COLUMN \`organizationId\` varchar(36);`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE IF EXISTS \`organization\`;`) + + await queryRunner.query(`ALTER TABLE \`workspace\` DROP COLUMN \`organizationId\`;`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/mariadb/1729130948686-LinkWorkspaceId.ts b/packages/server/src/enterprise/database/migrations/mariadb/1729130948686-LinkWorkspaceId.ts new file mode 100644 index 00000000000..b866afb9689 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/mariadb/1729130948686-LinkWorkspaceId.ts @@ -0,0 +1,233 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class LinkWorkspaceId1729130948686 implements MigrationInterface { + name = 'LinkWorkspaceId1729130948686' + + public async up(queryRunner: QueryRunner): Promise { + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`apikey\` + ADD INDEX \`idx_apikey_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_apikey_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for activeWorkspaceId + await queryRunner.query(` + ALTER TABLE \`user\` + ADD INDEX \`idx_user_activeWorkspaceId\` (\`activeWorkspaceId\`), + ADD CONSTRAINT \`fk_user_activeWorkspaceId\` + FOREIGN KEY (\`activeWorkspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`workspace_users\` + ADD INDEX \`idx_workspace_users_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_workspace_users_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES 
\`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`chat_flow\` + ADD INDEX \`idx_chat_flow_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_chat_flow_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`tool\` + ADD INDEX \`idx_tool_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_tool_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`assistant\` + ADD INDEX \`idx_assistant_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_assistant_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`credential\` + ADD INDEX \`idx_credential_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_credential_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`document_store\` + ADD INDEX \`idx_document_store_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_document_store_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`evaluation\` + ADD INDEX \`idx_evaluation_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_evaluation_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`evaluator\` + ADD INDEX \`idx_evaluator_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_evaluator_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`dataset\` + ADD INDEX \`idx_dataset_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_dataset_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`variable\` + ADD INDEX \`idx_variable_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_variable_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`workspace_shared\` + ADD INDEX \`idx_workspace_shared_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_workspace_shared_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`custom_template\` + ADD INDEX \`idx_custom_template_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_custom_template_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + } + + public async down(queryRunner: QueryRunner): Promise { + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`apikey\` + DROP INDEX \`idx_apikey_workspaceId\`, + DROP FOREIGN KEY 
\`fk_apikey_workspaceId\`; + `) + + // step 1 - drop index and foreign key for activeWorkspaceId + await queryRunner.query(` + ALTER TABLE \`user\` + DROP INDEX \`idx_user_activeWorkspaceId\`, + DROP FOREIGN KEY \`fk_user_activeWorkspaceId\`; + `) + + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`workspace_users\` + DROP INDEX \`idx_workspace_users_workspaceId\`, + DROP FOREIGN KEY \`fk_workspace_users_workspaceId\`; + `) + + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`chat_flow\` + DROP INDEX \`idx_chat_flow_workspaceId\`, + DROP FOREIGN KEY \`fk_chat_flow_workspaceId\`; + `) + + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`tool\` + DROP INDEX \`idx_tool_workspaceId\`, + DROP FOREIGN KEY \`fk_tool_workspaceId\`; + `) + + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`assistant\` + DROP INDEX \`idx_assistant_workspaceId\`, + DROP FOREIGN KEY \`fk_assistant_workspaceId\`; + `) + + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`credential\` + DROP INDEX \`idx_credential_workspaceId\`, + DROP FOREIGN KEY \`fk_credential_workspaceId\`; + `) + + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`document_store\` + DROP INDEX \`idx_document_store_workspaceId\`, + DROP FOREIGN KEY \`fk_document_store_workspaceId\`; + `) + + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`evaluation\` + DROP INDEX \`idx_evaluation_workspaceId\`, + DROP FOREIGN KEY \`fk_evaluation_workspaceId\`; + `) + + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`evaluator\` + DROP INDEX \`idx_evaluator_workspaceId\`, + DROP FOREIGN KEY \`fk_evaluator_workspaceId\`; + `) + + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`dataset\` + DROP INDEX \`idx_dataset_workspaceId\`, + DROP FOREIGN KEY \`fk_dataset_workspaceId\`; + `) + + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`variable\` + DROP INDEX \`idx_variable_workspaceId\`, + DROP FOREIGN KEY \`fk_variable_workspaceId\`; + `) + + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`workspace_shared\` + DROP INDEX \`idx_workspace_shared_workspaceId\`, + DROP FOREIGN KEY \`fk_workspace_shared_workspaceId\`; + `) + + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`custom_template\` + DROP INDEX \`idx_custom_template_workspaceId\`, + DROP FOREIGN KEY \`fk_custom_template_workspaceId\`; + `) + } +} diff --git a/packages/server/src/enterprise/database/migrations/mariadb/1729133111652-LinkOrganizationId.ts b/packages/server/src/enterprise/database/migrations/mariadb/1729133111652-LinkOrganizationId.ts new file mode 100644 index 00000000000..42ab342a7ab --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/mariadb/1729133111652-LinkOrganizationId.ts @@ -0,0 +1,25 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class LinkOrganizationId1729133111652 implements MigrationInterface { + name = 'LinkOrganizationId1729133111652' + + public async up(queryRunner: QueryRunner): Promise { + // step 1 - add index and foreign key for organizationId + 
await queryRunner.query(` + ALTER TABLE \`workspace\` + ADD INDEX \`idx_workspace_organizationId\` (\`organizationId\`), + ADD CONSTRAINT \`fk_workspace_organizationId\` + FOREIGN KEY (\`organizationId\`) + REFERENCES \`organization\`(\`id\`); + `) + } + + public async down(queryRunner: QueryRunner): Promise { + // step 1 - drop index and foreign key for organizationId + await queryRunner.query(` + ALTER TABLE \`workspace\` + DROP INDEX \`idx_workspace_organizationId\`, + DROP FOREIGN KEY \`fk_workspace_organizationId\`; + `) + } +} diff --git a/packages/server/src/enterprise/database/migrations/mariadb/1730519457880-AddSSOColumns.ts b/packages/server/src/enterprise/database/migrations/mariadb/1730519457880-AddSSOColumns.ts new file mode 100644 index 00000000000..2181b2060f2 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/mariadb/1730519457880-AddSSOColumns.ts @@ -0,0 +1,16 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { ensureColumnExists } from './mariaDbCustomFunctions' + +export class AddSSOColumns1730519457880 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await ensureColumnExists(queryRunner, 'organization', 'sso_config', 'text') + await ensureColumnExists(queryRunner, 'user', 'user_type', 'varchar(10)') + await ensureColumnExists(queryRunner, 'login_activity', 'login_mode', 'varchar(25)') + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "organization" DROP COLUMN "sso_config";`) + await queryRunner.query(`ALTER TABLE "user" DROP COLUMN "user_type";`) + await queryRunner.query(`ALTER TABLE "login_activity" DROP COLUMN "login_mode";`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/mariadb/1734074497540-AddPersonalWorkspace.ts b/packages/server/src/enterprise/database/migrations/mariadb/1734074497540-AddPersonalWorkspace.ts new file mode 100644 index 00000000000..0a27257793c --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/mariadb/1734074497540-AddPersonalWorkspace.ts @@ -0,0 +1,29 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { v4 as uuidv4 } from 'uuid' + +export class AddPersonalWorkspace1734074497540 implements MigrationInterface { + name = 'AddPersonalWorkspace1734074497540' + + public async up(queryRunner: QueryRunner): Promise { + const users = await queryRunner.query(`select * from \`user\`;`) + const organization = await queryRunner.query(`select \`id\` from \`organization\`;`) + for (let user of users) { + const workspaceDescription = 'Personal Workspace of ' + user.id + const workspaceId = uuidv4() + + await queryRunner.query(` + insert into \`workspace\` (\`id\`, \`name\`, \`description\`, \`organizationId\`) + values('${workspaceId}', 'Personal Workspace', '${workspaceDescription}', '${organization[0].id}'); + `) + + const workspaceUsersId = uuidv4() + + await queryRunner.query(` + insert into \`workspace_users\` (\`id\`, \`workspaceId\`, \`userId\`, \`role\`) + values('${workspaceUsersId}', '${workspaceId}', '${user.id}', 'pw'); + `) + } + } + + public async down(): Promise {} +} diff --git a/packages/server/src/enterprise/database/migrations/mariadb/1737076223692-RefactorEnterpriseDatabase.ts b/packages/server/src/enterprise/database/migrations/mariadb/1737076223692-RefactorEnterpriseDatabase.ts new file mode 100644 index 00000000000..f571a44aea8 --- /dev/null +++ 
b/packages/server/src/enterprise/database/migrations/mariadb/1737076223692-RefactorEnterpriseDatabase.ts @@ -0,0 +1,494 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { decrypt, encrypt } from '../../../utils/encryption.util' +import { LoginMethodStatus } from '../../entities/login-method.entity' +import { OrganizationUserStatus } from '../../entities/organization-user.entity' +import { OrganizationName } from '../../entities/organization.entity' +import { GeneralRole } from '../../entities/role.entity' +import { UserStatus } from '../../entities/user.entity' +import { WorkspaceUserStatus } from '../../entities/workspace-user.entity' +import { WorkspaceName } from '../../entities/workspace.entity' + +export class RefactorEnterpriseDatabase1737076223692 implements MigrationInterface { + name = 'RefactorEnterpriseDatabase1737076223692' + private async modifyTable(queryRunner: QueryRunner): Promise { + /*------------------------------------- + --------------- user ----------------- + --------------------------------------*/ + // rename user table to temp_user + await queryRunner.query(`alter table \`user\` rename to \`temp_user\`;`) + + // create user table + await queryRunner.query(` + create table \`user\` ( + \`id\` varchar(36) default (uuid()) primary key, + \`name\` varchar(100) not null, + \`email\` varchar(255) not null unique, + \`credential\` text null, + \`tempToken\` text null, + \`tokenExpiry\` timestamp null, + \`status\` varchar(20) default '${UserStatus.UNVERIFIED}' not null, + \`createdDate\` timestamp default now() not null, + \`updatedDate\` timestamp default now() not null, + \`createdBy\` varchar(36) not null, + \`updatedBy\` varchar(36) not null, + constraint \`fk_user_createdBy\` foreign key (\`createdBy\`) references \`user\` (\`id\`), + constraint \`fk_user_updatedBy\` foreign key (\`updatedBy\`) references \`user\` (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_520_ci;; + `) + + /*------------------------------------- + ----------- organization -------------- + --------------------------------------*/ + // rename organization table to temp_organization + await queryRunner.query(`alter table \`organization\` rename to \`temp_organization\`;`) + + // create organization table + await queryRunner.query(` + create table \`organization\` ( + \`id\` varchar(36) default (uuid()) primary key, + \`name\` varchar(100) default '${OrganizationName.DEFAULT_ORGANIZATION}' not null, + \`customerId\` varchar(100) null, + \`subscriptionId\` varchar(100) null, + \`createdDate\` timestamp default now() not null, + \`updatedDate\` timestamp default now() not null, + \`createdBy\` varchar(36) not null, + \`updatedBy\` varchar(36) not null, + constraint \`fk_organization_createdBy\` foreign key (\`createdBy\`) references \`user\` (\`id\`), + constraint \`fk_organization_updatedBy\` foreign key (\`updatedBy\`) references \`user\` (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_520_ci;; + `) + + /*------------------------------------- + ----------- login method -------------- + --------------------------------------*/ + // create login_method table + await queryRunner.query(` + create table \`login_method\` ( + \`id\` varchar(36) default (uuid()) primary key, + \`organizationId\` varchar(36) null, + \`name\` varchar(100) not null, + \`config\` text not null, + \`status\` varchar(20) default '${LoginMethodStatus.ENABLE}' not null, + \`createdDate\` timestamp default now() not null, + \`updatedDate\` timestamp 
default now() not null,
+                \`createdBy\` varchar(36) null,
+                \`updatedBy\` varchar(36) null,
+                constraint \`fk_login_method_organizationId\` foreign key (\`organizationId\`) references \`organization\` (\`id\`),
+                constraint \`fk_login_method_createdBy\` foreign key (\`createdBy\`) references \`user\` (\`id\`),
+                constraint \`fk_login_method_updatedBy\` foreign key (\`updatedBy\`) references \`user\` (\`id\`)
+            ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_520_ci;;
+        `)
+
+        /*-------------------------------------
+        --------------- role ------------------
+        --------------------------------------*/
+        // rename roles table to temp_role
+        await queryRunner.query(`alter table \`roles\` rename to \`temp_role\`;`)
+
+        // create role table
+        await queryRunner.query(`
+            create table \`role\` (
+                \`id\` varchar(36) default (uuid()) primary key,
+                \`organizationId\` varchar(36) null,
+                \`name\` varchar(100) not null,
+                \`description\` text null,
+                \`permissions\` text not null,
+                \`createdDate\` timestamp default now() not null,
+                \`updatedDate\` timestamp default now() not null,
+                \`createdBy\` varchar(36) null,
+                \`updatedBy\` varchar(36) null,
+                constraint \`fk_role_organizationId\` foreign key (\`organizationId\`) references \`organization\` (\`id\`),
+                constraint \`fk_role_createdBy\` foreign key (\`createdBy\`) references \`user\` (\`id\`),
+                constraint \`fk_role_updatedBy\` foreign key (\`updatedBy\`) references \`user\` (\`id\`)
+            ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_520_ci;;
+        `)
+
+        /*-------------------------------------
+        ---------- organization_user ----------
+        --------------------------------------*/
+        // create organization_user table
+        await queryRunner.query(`
+            create table \`organization_user\` (
+                \`organizationId\` varchar(36) not null,
+                \`userId\` varchar(36) not null,
+                \`roleId\` varchar(36) not null,
+                \`status\` varchar(20) default '${OrganizationUserStatus.ACTIVE}' not null,
+                \`createdDate\` timestamp default now() not null,
+                \`updatedDate\` timestamp default now() not null,
+                \`createdBy\` varchar(36) not null,
+                \`updatedBy\` varchar(36) not null,
+                constraint \`pk_organization_user\` primary key (\`organizationId\`, \`userId\`),
+                constraint \`fk_organization_user_organizationId\` foreign key (\`organizationId\`) references \`organization\` (\`id\`),
+                constraint \`fk_organization_user_userId\` foreign key (\`userId\`) references \`user\` (\`id\`),
+                constraint \`fk_organization_user_roleId\` foreign key (\`roleId\`) references \`role\` (\`id\`),
+                constraint \`fk_organization_user_createdBy\` foreign key (\`createdBy\`) references \`user\` (\`id\`),
+                constraint \`fk_organization_user_updatedBy\` foreign key (\`updatedBy\`) references \`user\` (\`id\`)
+            ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_520_ci;;
+        `)
+
+        /*-------------------------------------
+        ------------- workspace ---------------
+        --------------------------------------*/
+        // modify workspace table
+        await queryRunner.query(`
+            alter table \`workspace\`
+            drop constraint \`fk_workspace_organizationId\`;
+        `)
+
+        await queryRunner.query(`
+            alter table \`workspace\`
+            modify column \`organizationId\` varchar(36) not null,
+            modify column \`name\` varchar(100),
+            modify column \`description\` text;
+        `)
+
+        await queryRunner.query(`
+            alter table \`workspace\`
+            add column \`createdBy\` varchar(36) null,
+            add column \`updatedBy\` varchar(36) null;
+        `)
+
+        // remove first if needed will be add back, will cause insert
to slow + await queryRunner.query(` + drop index \`idx_workspace_organizationId\` on \`workspace\`; + `) + + /*------------------------------------- + ----------- workspace_user ------------ + --------------------------------------*/ + // rename workspace_users table to temp_workspace_user + await queryRunner.query(`alter table \`workspace_users\` rename to \`temp_workspace_user\`;`) + + // create workspace_user table + await queryRunner.query(` + create table \`workspace_user\` ( + \`workspaceId\` varchar(36) not null, + \`userId\` varchar(36) not null, + \`roleId\` varchar(36) not null, + \`status\` varchar(20) default '${WorkspaceUserStatus.INVITED}' not null, + \`lastLogin\` timestamp null, + \`createdDate\` timestamp default now() not null, + \`updatedDate\` timestamp default now() not null, + \`createdBy\` varchar(36) not null, + \`updatedBy\` varchar(36) not null, + constraint \`pk_workspace_user\` primary key (\`workspaceId\`, \`userId\`), + constraint \`fk_workspace_user_workspaceId\` foreign key (\`workspaceId\`) references \`workspace\` (\`id\`), + constraint \`fk_workspace_user_userId\` foreign key (\`userId\`) references \`user\` (\`id\`), + constraint \`fk_workspace_user_roleId\` foreign key (\`roleId\`) references \`role\` (\`id\`), + constraint \`fk_workspace_user_createdBy\` foreign key (\`createdBy\`) references \`user\` (\`id\`), + constraint \`fk_workspace_user_updatedBy\` foreign key (\`updatedBy\`) references \`user\` (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_520_ci;; + `) + } + + private async deleteWorkspaceWithoutUser(queryRunner: QueryRunner) { + const workspaceWithoutUser = await queryRunner.query(` + select w.\`id\` as \`id\` from \`workspace_user\` as \`wu\` + right join \`workspace\` as \`w\` on \`wu\`.\`workspaceId\` = \`w\`.\`id\` + where \`wu\`.\`userId\` is null; + `) + const workspaceIds = workspaceWithoutUser.map((workspace: { id: string }) => `'${workspace.id}'`).join(',') + + // Delete related records from other tables that reference the deleted workspaces + if (workspaceIds && workspaceIds.length > 0) { + await queryRunner.query(` + delete from \`workspace_user\` where \`workspaceId\` in (${workspaceIds}); + `) + await queryRunner.query(` + delete from \`apikey\` where \`workspaceId\` in (${workspaceIds}); + `) + await queryRunner.query(` + delete from \`assistant\` where \`workspaceId\` in (${workspaceIds}); + `) + const chatflows = await queryRunner.query(` + select id from \`chat_flow\` where \`workspaceId\` in (${workspaceIds}); + `) + const chatflowIds = chatflows.map((chatflow: { id: string }) => `'${chatflow.id}'`).join(',') + if (chatflowIds && chatflowIds.length > 0) { + await queryRunner.query(` + delete from \`chat_flow\` where \`workspaceId\` in (${workspaceIds}); + `) + await queryRunner.query(` + delete from \`upsert_history\` where \`chatflowid\` in (${chatflowIds}); + `) + await queryRunner.query(` + delete from \`chat_message\` where \`chatflowid\` in (${chatflowIds}); + `) + await queryRunner.query(` + delete from \`chat_message_feedback\` where \`chatflowid\` in (${chatflowIds}); + `) + } + await queryRunner.query(` + delete from \`credential\` where \`workspaceId\` in (${workspaceIds}); + `) + await queryRunner.query(` + delete from \`custom_template\` where \`workspaceId\` in (${workspaceIds}); + `) + const datasets = await queryRunner.query(` + select id from \`dataset\` where \`workspaceId\` in (${workspaceIds}); + `) + const datasetIds = datasets.map((dataset: { id: string }) => 
`'${dataset.id}'`).join(',') + if (datasetIds && datasetIds.length > 0) { + await queryRunner.query(` + delete from \`dataset\` where \`workspaceId\` in (${workspaceIds}); + `) + await queryRunner.query(` + delete from \`dataset_row\` where \`datasetId\` in (${datasetIds}); + `) + } + const documentStores = await queryRunner.query(` + select id from \`document_store\` where \`workspaceId\` in (${workspaceIds}); + `) + const documentStoreIds = documentStores.map((documentStore: { id: string }) => `'${documentStore.id}'`).join(',') + if (documentStoreIds && documentStoreIds.length > 0) { + await queryRunner.query(` + delete from \`document_store\` where \`workspaceId\` in (${workspaceIds}); + `) + await queryRunner.query(` + delete from \`document_store_file_chunk\` where \`storeId\` in (${documentStoreIds}); + `) + } + const evaluations = await queryRunner.query(` + select id from \`evaluation\` where \`workspaceId\` in (${workspaceIds}); + `) + const evaluationIds = evaluations.map((evaluation: { id: string }) => `'${evaluation.id}'`).join(',') + if (evaluationIds && evaluationIds.length > 0) { + await queryRunner.query(` + delete from \`evaluation\` where \`workspaceId\` in (${workspaceIds}); + `) + await queryRunner.query(` + delete from \`evaluation_run\` where \`evaluationId\` in (${evaluationIds}); + `) + } + await queryRunner.query(` + delete from \`evaluator\` where \`workspaceId\` in (${workspaceIds}); + `) + await queryRunner.query(` + delete from \`tool\` where \`workspaceId\` in (${workspaceIds}); + `) + await queryRunner.query(` + delete from \`variable\` where \`workspaceId\` in (${workspaceIds}); + `) + await queryRunner.query(` + delete from \`workspace_shared\` where \`workspaceId\` in (${workspaceIds}); + `) + await queryRunner.query(` + delete from \`workspace\` where \`id\` in (${workspaceIds}); + `) + } + } + + private async populateTable(queryRunner: QueryRunner): Promise { + // insert generalRole + const generalRole = [ + { + name: 'owner', + description: 'Has full control over the organization.', + permissions: '["organization","workspace"]' + }, + { + name: 'member', + description: 'Has limited control over the organization.', + permissions: '[]' + }, + { + name: 'personal workspace', + description: 'Has full control over the personal workspace', + permissions: + '[ "chatflows:view", "chatflows:create", "chatflows:update", "chatflows:duplicate", "chatflows:delete", "chatflows:export", "chatflows:import", "chatflows:config", "chatflows:domains", "agentflows:view", "agentflows:create", "agentflows:update", "agentflows:duplicate", "agentflows:delete", "agentflows:export", "agentflows:import", "agentflows:config", "agentflows:domains", "tools:view", "tools:create", "tools:update", "tools:delete", "tools:export", "assistants:view", "assistants:create", "assistants:update", "assistants:delete", "credentials:view", "credentials:create", "credentials:update", "credentials:delete", "credentials:share", "variables:view", "variables:create", "variables:update", "variables:delete", "apikeys:view", "apikeys:create", "apikeys:update", "apikeys:delete", "apikeys:import", "documentStores:view", "documentStores:create", "documentStores:update", "documentStores:delete", "documentStores:add-loader", "documentStores:delete-loader", "documentStores:preview-process", "documentStores:upsert-config", "datasets:view", "datasets:create", "datasets:update", "datasets:delete", "evaluators:view", "evaluators:create", "evaluators:update", "evaluators:delete", "evaluations:view", 
"evaluations:create", "evaluations:update", "evaluations:delete", "evaluations:run", "templates:marketplace", "templates:custom", "templates:custom-delete", "templates:toolexport", "templates:flowexport", "templates:custom-share", "workspace:export", "workspace:import", "executions:view", "executions:delete" ]' + } + ] + for (let role of generalRole) { + await queryRunner.query(` + insert into \`role\`(\`name\`, \`description\`, \`permissions\`) + values('${role.name}', '${role.description}', '${role.permissions}'); + `) + } + + const users = await queryRunner.query('select * from `temp_user`;') + const noExistingData = users.length > 0 === false + if (noExistingData) return + + const organizations = await queryRunner.query('select * from `temp_organization`;') + const organizationId = organizations[0].id + const adminUserId = organizations[0].adminUserId + const ssoConfig = organizations[0].sso_config ? JSON.parse(await decrypt(organizations[0].sso_config)).providers : [] + + /*------------------------------------- + --------------- user ----------------- + --------------------------------------*/ + // insert admin user first + await queryRunner.query(` + insert into \`user\` (\`id\`, \`name\`, \`email\`, \`credential\`, \`tempToken\`, \`tokenExpiry\`, \`status\`, \`createdBy\`, \`updatedBy\`) + select tu.\`id\`, coalesce(tu.\`name\`, tu.\`email\`), tu.\`email\`, tu.\`credential\`, tu.\`tempToken\`, tu.\`tokenExpiry\`, tu.\`status\`, + '${adminUserId}', '${adminUserId}' + from \`temp_user\` as \`tu\` where tu.\`id\` = '${adminUserId}'; + `) + + // insert user with temp_user data + await queryRunner.query(` + insert into \`user\` (\`id\`, \`name\`, \`email\`, \`credential\`, \`tempToken\`, \`tokenExpiry\`, \`status\`, \`createdBy\`, \`updatedBy\`) + select tu.\`id\`, coalesce(tu.\`name\`, tu.\`email\`), tu.\`email\`, tu.\`credential\`, tu.\`tempToken\`, tu.\`tokenExpiry\`, tu.\`status\`, + '${adminUserId}', '${adminUserId}' + from \`temp_user\` as \`tu\` where tu.\`id\` != '${adminUserId}'; + `) + + /*------------------------------------- + ----------- organization -------------- + --------------------------------------*/ + // insert organization with temp_organization data + await queryRunner.query(` + insert into \`organization\` (\`id\`, \`name\`, \`createdBy\`, \`updatedBy\`) + select \`id\`, \`name\`, \`adminUserId\`, \`adminUserId\` from \`temp_organization\`; + `) + + /*------------------------------------- + ----------- login method -------------- + --------------------------------------*/ + // insert login_method with temp_organization data + for (let config of ssoConfig) { + const newConfigFormat = { + domain: config.domain === '' || config.domain === undefined ? undefined : config.domain, + tenantID: config.tenantID === '' || config.tenantID === undefined ? undefined : config.tenantID, + clientID: config.clientID === '' || config.clientID === undefined ? undefined : config.clientID, + clientSecret: config.clientSecret === '' || config.clientSecret === undefined ? undefined : config.clientSecret + } + const status = config.configEnabled === true ? 
LoginMethodStatus.ENABLE : LoginMethodStatus.DISABLE + + const allUndefined = Object.values(newConfigFormat).every((value) => value === undefined) + if (allUndefined && status === LoginMethodStatus.DISABLE) continue + const encryptData = await encrypt(JSON.stringify(newConfigFormat)) + + await queryRunner.query(` + insert into \`login_method\` (\`organizationId\`, \`name\`, \`config\`, \`status\`, \`createdBy\`, \`updatedBy\`) + values('${organizationId}','${config.providerName}','${encryptData}','${status}','${adminUserId}','${adminUserId}'); + `) + } + + /*------------------------------------- + --------------- role ------------------ + --------------------------------------*/ + // insert workspace role into role + const workspaceRole = await queryRunner.query(`select \`id\`, \`name\`, \`description\`, \`permissions\` from \`temp_role\`;`) + for (let role of workspaceRole) { + role.permissions = JSON.stringify(role.permissions.split(',').filter((permission: string) => permission.trim() !== '')) + const haveDescriptionQuery = `insert into \`role\` (\`id\`, \`organizationId\`, \`name\`, \`description\`, \`permissions\`, \`createdBy\`, \`updatedBy\`) + values('${role.id}','${organizationId}','${role.name}','${role.description}','${role.permissions}','${adminUserId}','${adminUserId}');` + const noHaveDescriptionQuery = `insert into \`role\` (\`id\`, \`organizationId\`, \`name\`, \`permissions\`, \`createdBy\`, \`updatedBy\`) + values('${role.id}','${organizationId}','${role.name}','${role.permissions}','${adminUserId}','${adminUserId}');` + const insertRoleQuery = role.description ? haveDescriptionQuery : noHaveDescriptionQuery + await queryRunner.query(insertRoleQuery) + } + + /*------------------------------------- + ---------- organization_user ---------- + --------------------------------------*/ + const roles = await queryRunner.query('select * from `role`;') + // insert organization_user with user, role and temp_organization data + for (let user of users) { + const roleId = + user.id === adminUserId + ? 
roles.find((role: any) => role.name === GeneralRole.OWNER).id + : roles.find((role: any) => role.name === GeneralRole.MEMBER).id + await queryRunner.query(` + insert into \`organization_user\` (\`organizationId\`, \`userId\`, \`roleId\`, \`status\`, \`createdBy\`, \`updatedBy\`) + values ('${organizationId}','${user.id}','${roleId}','${user.status}','${adminUserId}','${adminUserId}'); + `) + } + + /*------------------------------------- + ------------- workspace --------------- + --------------------------------------*/ + const workspaces = await queryRunner.query('select * from `workspace`;') + for (let workspace of workspaces) { + await queryRunner.query( + `update \`workspace\` set \`createdBy\` = '${adminUserId}', \`updatedBy\` = '${adminUserId}' where \`id\` = '${workspace.id}';` + ) + } + + /*------------------------------------- + ----------- workspace_user ------------ + --------------------------------------*/ + const workspaceUsers = await queryRunner.query('select * from `temp_workspace_user`;') + for (let workspaceUser of workspaceUsers) { + switch (workspaceUser.role) { + case 'org_admin': + workspaceUser.role = roles.find((role: any) => role.name === GeneralRole.OWNER).id + break + case 'pw': + workspaceUser.role = roles.find((role: any) => role.name === GeneralRole.PERSONAL_WORKSPACE).id + break + default: + workspaceUser.role = roles.find((role: any) => role.name === workspaceUser.role).id + break + } + const user = users.find((user: any) => user.id === workspaceUser.userId) + const workspace = workspaces.find((workspace: any) => workspace.id === workspaceUser.workspaceId) + if (workspaceUser.workspaceId === user.activeWorkspaceId && user.lastLogin) { + const lastLogin = new Date(user.lastLogin).toISOString().replace('T', ' ').slice(0, 19) + await queryRunner.query(` + insert into \`workspace_user\` (\`workspaceId\`, \`userId\`, \`roleId\`, \`status\`, \`lastLogin\`,\`createdBy\`, \`updatedBy\`) + values ('${workspaceUser.workspaceId}','${workspaceUser.userId}','${workspaceUser.role}','${WorkspaceUserStatus.ACTIVE}','${lastLogin}','${adminUserId}','${adminUserId}'); + `) + } else if (workspace.name === WorkspaceName.DEFAULT_PERSONAL_WORKSPACE && !user.lastLogin) { + // Skip personal workspaces for users who haven't signed up yet to avoid duplicates when they sign up. + // account.service.ts creates personal workspace during sign-up. 
+ await queryRunner.query(` + delete from \`temp_workspace_user\` where \`workspaceId\` = '${workspaceUser.workspaceId}' and \`userId\` = '${workspaceUser.userId}'; + `) + await queryRunner.query(` + delete from \`workspace\` where \`id\` = '${workspaceUser.workspaceId}'; + `) + } else { + await queryRunner.query(` + insert into \`workspace_user\` (\`workspaceId\`, \`userId\`, \`roleId\`, \`status\`,\`createdBy\`, \`updatedBy\`) + values ('${workspaceUser.workspaceId}','${workspaceUser.userId}','${workspaceUser.role}','${WorkspaceUserStatus.INVITED}','${adminUserId}','${adminUserId}'); + `) + } + } + + await this.deleteWorkspaceWithoutUser(queryRunner) + } + + private async deleteTempTable(queryRunner: QueryRunner): Promise { + await queryRunner.query(` + drop table \`temp_workspace_user\`; + `) + await queryRunner.query(` + drop table \`temp_role\`; + `) + await queryRunner.query(` + drop table \`temp_organization\`; + `) + await queryRunner.query(` + drop table \`temp_user\`; + `) + } + + public async up(queryRunner: QueryRunner): Promise { + await this.modifyTable(queryRunner) + await this.populateTable(queryRunner) + await this.deleteTempTable(queryRunner) + + // This query cannot be part of the modifyTable function because: + // 1. The \`organizationId\` in the \`workspace\` table might be referencing data in the \`temp_organization\` table, so it must be altered last. + // 2. Setting \`createdBy\` and \`updatedBy\` to NOT NULL needs to happen after ensuring there’s no existing data that would violate the constraint, + // because altering these columns while there is data could prevent new records from being inserted into the \`workspace\` table. + await queryRunner.query(` + alter table \`workspace\` + modify column \`createdBy\` varchar(36) not null, + modify column \`updatedBy\` varchar(36) not null, + add constraint \`fk_organizationId\` foreign key (\`organizationId\`) references \`organization\` (\`id\`), + add constraint \`fk_workspace_createdBy\` foreign key (\`createdBy\`) references \`user\` (\`id\`), + add constraint \`fk_workspace_updatedBy\` foreign key (\`updatedBy\`) references \`user\` (\`id\`); + `) + + // modify evaluation table for average_metrics column to be nullable + await queryRunner.query(` + alter table \`evaluation\` + modify column \`average_metrics\` longtext null; + `) + } + + public async down(): Promise {} +} diff --git a/packages/server/src/enterprise/database/migrations/mariadb/1746862866554-ExecutionLinkWorkspaceId.ts b/packages/server/src/enterprise/database/migrations/mariadb/1746862866554-ExecutionLinkWorkspaceId.ts new file mode 100644 index 00000000000..44d4e8855d5 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/mariadb/1746862866554-ExecutionLinkWorkspaceId.ts @@ -0,0 +1,30 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { ensureColumnExists } from './mariaDbCustomFunctions' + +export class ExecutionLinkWorkspaceId1746862866554 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + // step 1 - add workspaceId column + await ensureColumnExists(queryRunner, 'execution', 'workspaceId', 'varchar(36)') + + // step 2 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`execution\` + ADD INDEX \`idx_execution_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_execution_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + } + + public async down(queryRunner: QueryRunner): Promise { + // step 1 - drop 
index and foreign key for workspaceId
+        await queryRunner.query(`
+            ALTER TABLE \`execution\`
+            DROP INDEX \`idx_execution_workspaceId\`,
+            DROP FOREIGN KEY \`fk_execution_workspaceId\`;
+        `)
+
+        // step 2 - drop workspaceId column
+        await queryRunner.query(`ALTER TABLE \`execution\` DROP COLUMN \`workspaceId\`;`)
+    }
+}
diff --git a/packages/server/src/enterprise/database/migrations/mariadb/mariaDbCustomFunctions.ts b/packages/server/src/enterprise/database/migrations/mariadb/mariaDbCustomFunctions.ts
new file mode 100644
index 00000000000..05bc715bd45
--- /dev/null
+++ b/packages/server/src/enterprise/database/migrations/mariadb/mariaDbCustomFunctions.ts
@@ -0,0 +1,26 @@
+import { QueryRunner } from 'typeorm'
+
+export const ensureColumnExists = async (
+    queryRunner: QueryRunner,
+    tableName: string,
+    columnName: string,
+    columnType: string // Accept column type as a parameter
+): Promise<void> => {
+    // Check if the specified column exists in the given table
+    const columnCheck = await queryRunner.query(
+        `
+        SELECT COLUMN_NAME
+        FROM information_schema.COLUMNS
+        WHERE TABLE_NAME = ? AND COLUMN_NAME = ? AND TABLE_SCHEMA = ?
+        `,
+        [tableName, columnName, queryRunner.connection.options.database]
+    )
+
+    // Check if the column exists
+    const columnExists = columnCheck.length > 0
+
+    if (!columnExists) {
+        // Add the column if it does not exist
+        await queryRunner.query(`ALTER TABLE ${tableName} ADD COLUMN ${columnName} ${columnType};`)
+    }
+}
diff --git a/packages/server/src/enterprise/database/migrations/mysql/1720230151482-AddAuthTables.ts b/packages/server/src/enterprise/database/migrations/mysql/1720230151482-AddAuthTables.ts
new file mode 100644
index 00000000000..408b0f5333f
--- /dev/null
+++ b/packages/server/src/enterprise/database/migrations/mysql/1720230151482-AddAuthTables.ts
@@ -0,0 +1,46 @@
+import { MigrationInterface, QueryRunner } from 'typeorm'
+
+export class AddAuthTables1720230151482 implements MigrationInterface {
+    public async up(queryRunner: QueryRunner): Promise<void> {
+        await queryRunner.query(
+            `CREATE TABLE IF NOT EXISTS \`user\` (
+                \`id\` varchar(36) NOT NULL,
+                \`name\` varchar(255),
+                \`role\` varchar(20) NOT NULL,
+                \`status\` varchar(20) NOT NULL,
+                \`email\` varchar(100) NOT NULL,
+                \`credential\` text,
+                \`tempToken\` text,
+                \`tokenExpiry\` datetime(6),
+                \`activeWorkspaceId\` varchar(100),
+                \`lastLogin\` datetime(6),
+                PRIMARY KEY (\`id\`)
+            ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;`
+        )
+        await queryRunner.query(
+            `CREATE TABLE IF NOT EXISTS \`roles\` (
+                \`id\` varchar(36) NOT NULL,
+                \`name\` varchar(255),
+                \`description\` varchar(255),
+                \`permissions\` text,
+                PRIMARY KEY (\`id\`)
+            ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;`
+        )
+        await queryRunner.query(
+            `CREATE TABLE IF NOT EXISTS \`login_activity\` (
+                \`id\` varchar(36) NOT NULL,
+                \`username\` varchar(255),
+                \`message\` varchar(255) NOT NULL,
+                \`activity_code\` INT NOT NULL,
+                \`attemptedDateTime\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6),
+                PRIMARY KEY (\`id\`)
+            ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;`
+        )
+    }
+
+    public async down(queryRunner: QueryRunner): Promise<void> {
+        await queryRunner.query(`DROP TABLE user`)
+        await queryRunner.query(`DROP TABLE roles`)
+        await queryRunner.query(`DROP TABLE login_activity`)
+    }
+}
diff --git a/packages/server/src/enterprise/database/migrations/mysql/1720230151484-AddWorkspace.ts
b/packages/server/src/enterprise/database/migrations/mysql/1720230151484-AddWorkspace.ts new file mode 100644 index 00000000000..674502c3217 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/mysql/1720230151484-AddWorkspace.ts @@ -0,0 +1,52 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { ensureColumnExists } from './mysqlCustomFunctions' + +export class AddWorkspace1720230151484 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS \`workspace\` ( + \`id\` varchar(36) NOT NULL, + \`name\` varchar(255) NOT NULL, + \`description\` varchar(255) DEFAULT NULL, + \`createdDate\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6), + \`updatedDate\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6), + PRIMARY KEY (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;` + ) + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS \`workspace_users\` ( + \`id\` varchar(36) NOT NULL, + \`workspaceId\` varchar(36) NOT NULL, + \`userId\` varchar(50) NOT NULL, + \`role\` varchar(20) DEFAULT NULL, + PRIMARY KEY (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;` + ) + + await ensureColumnExists(queryRunner, 'chat_flow', 'workspaceId', 'varchar(36)') + await ensureColumnExists(queryRunner, 'tool', 'workspaceId', 'varchar(36)') + await ensureColumnExists(queryRunner, 'assistant', 'workspaceId', 'varchar(36)') + await ensureColumnExists(queryRunner, 'credential', 'workspaceId', 'varchar(36)') + await ensureColumnExists(queryRunner, 'document_store', 'workspaceId', 'varchar(36)') + await ensureColumnExists(queryRunner, 'evaluation', 'workspaceId', 'varchar(36)') + await ensureColumnExists(queryRunner, 'evaluator', 'workspaceId', 'varchar(36)') + await ensureColumnExists(queryRunner, 'dataset', 'workspaceId', 'varchar(36)') + await ensureColumnExists(queryRunner, 'apikey', 'workspaceId', 'varchar(36)') + await ensureColumnExists(queryRunner, 'variable', 'workspaceId', 'varchar(36)') + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE workspace`) + await queryRunner.query(`DROP TABLE workspace_users`) + + await queryRunner.query(`ALTER TABLE \`chat_message\` DROP COLUMN \`workspaceId\`;`) + await queryRunner.query(`ALTER TABLE \`tool\` DROP COLUMN \`workspaceId\`;`) + await queryRunner.query(`ALTER TABLE \`assistant\` DROP COLUMN \`workspaceId\`;`) + await queryRunner.query(`ALTER TABLE \`credential\` DROP COLUMN \`workspaceId\`;`) + await queryRunner.query(`ALTER TABLE \`document_store\` DROP COLUMN \`workspaceId\`;`) + await queryRunner.query(`ALTER TABLE \`evaluation\` DROP COLUMN \`workspaceId\`;`) + await queryRunner.query(`ALTER TABLE \`dataset\` DROP COLUMN \`workspaceId\`;`) + await queryRunner.query(`ALTER TABLE \`apikey\` DROP COLUMN \`workspaceId\`;`) + await queryRunner.query(`ALTER TABLE \`variable\` DROP COLUMN \`workspaceId\`;`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/mysql/1726654922034-AddWorkspaceShared.ts b/packages/server/src/enterprise/database/migrations/mysql/1726654922034-AddWorkspaceShared.ts new file mode 100644 index 00000000000..2ed824280c1 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/mysql/1726654922034-AddWorkspaceShared.ts @@ -0,0 +1,21 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddWorkspaceShared1726654922034 implements 
MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS \`workspace_shared\` ( + \`id\` varchar(36) NOT NULL, + \`workspaceId\` varchar(36) NOT NULL, + \`sharedItemId\` varchar(50) NOT NULL, + \`itemType\` varchar(50) NOT NULL, + \`createdDate\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6), + \`updatedDate\` datetime(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6), + PRIMARY KEY (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;` + ) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE workspace_shared`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/mysql/1726655750383-AddWorkspaceIdToCustomTemplate.ts b/packages/server/src/enterprise/database/migrations/mysql/1726655750383-AddWorkspaceIdToCustomTemplate.ts new file mode 100644 index 00000000000..c6b6c2ab2bd --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/mysql/1726655750383-AddWorkspaceIdToCustomTemplate.ts @@ -0,0 +1,11 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddWorkspaceIdToCustomTemplate1726655750383 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE \`custom_template\` ADD COLUMN \`workspaceId\` varchar(36);`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE \`custom_template\` DROP COLUMN \`workspaceId\`;`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/mysql/1727798417345-AddOrganization.ts b/packages/server/src/enterprise/database/migrations/mysql/1727798417345-AddOrganization.ts new file mode 100644 index 00000000000..dd88eca1293 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/mysql/1727798417345-AddOrganization.ts @@ -0,0 +1,26 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddOrganization1727798417345 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS \`organization\` ( + \`id\` varchar(36) NOT NULL, + \`name\` varchar(255) NOT NULL, + \`adminUserId\` varchar(255) NULL, + \`defaultWsId\` varchar(255) NULL, + \`organization_type\` varchar(255) NULL, + \`createdDate\` timestamp(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6), + \`updatedDate\` timestamp(6) NOT NULL DEFAULT CURRENT_TIMESTAMP(6) ON UPDATE CURRENT_TIMESTAMP(6), + PRIMARY KEY (\`id\`), + KEY \`idx_organization_id\` (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;` + ) + await queryRunner.query(`ALTER TABLE \`workspace\` ADD COLUMN \`organizationId\` varchar(36);`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE IF EXISTS \`organization\`;`) + + await queryRunner.query(`ALTER TABLE \`workspace\` DROP COLUMN \`organizationId\`;`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/mysql/1729130948686-LinkWorkspaceId.ts b/packages/server/src/enterprise/database/migrations/mysql/1729130948686-LinkWorkspaceId.ts new file mode 100644 index 00000000000..b866afb9689 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/mysql/1729130948686-LinkWorkspaceId.ts @@ -0,0 +1,233 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class LinkWorkspaceId1729130948686 implements MigrationInterface { + 
name = 'LinkWorkspaceId1729130948686' + + public async up(queryRunner: QueryRunner): Promise { + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`apikey\` + ADD INDEX \`idx_apikey_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_apikey_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for activeWorkspaceId + await queryRunner.query(` + ALTER TABLE \`user\` + ADD INDEX \`idx_user_activeWorkspaceId\` (\`activeWorkspaceId\`), + ADD CONSTRAINT \`fk_user_activeWorkspaceId\` + FOREIGN KEY (\`activeWorkspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`workspace_users\` + ADD INDEX \`idx_workspace_users_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_workspace_users_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`chat_flow\` + ADD INDEX \`idx_chat_flow_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_chat_flow_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`tool\` + ADD INDEX \`idx_tool_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_tool_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`assistant\` + ADD INDEX \`idx_assistant_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_assistant_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`credential\` + ADD INDEX \`idx_credential_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_credential_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`document_store\` + ADD INDEX \`idx_document_store_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_document_store_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`evaluation\` + ADD INDEX \`idx_evaluation_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_evaluation_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`evaluator\` + ADD INDEX \`idx_evaluator_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_evaluator_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`dataset\` + ADD INDEX \`idx_dataset_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_dataset_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`variable\` + ADD INDEX \`idx_variable_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT 
\`fk_variable_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`workspace_shared\` + ADD INDEX \`idx_workspace_shared_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_workspace_shared_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + + // step 1 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`custom_template\` + ADD INDEX \`idx_custom_template_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_custom_template_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + } + + public async down(queryRunner: QueryRunner): Promise { + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`apikey\` + DROP INDEX \`idx_apikey_workspaceId\`, + DROP FOREIGN KEY \`fk_apikey_workspaceId\`; + `) + + // step 1 - drop index and foreign key for activeWorkspaceId + await queryRunner.query(` + ALTER TABLE \`user\` + DROP INDEX \`idx_user_activeWorkspaceId\`, + DROP FOREIGN KEY \`fk_user_activeWorkspaceId\`; + `) + + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`workspace_users\` + DROP INDEX \`idx_workspace_users_workspaceId\`, + DROP FOREIGN KEY \`fk_workspace_users_workspaceId\`; + `) + + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`chat_flow\` + DROP INDEX \`idx_chat_flow_workspaceId\`, + DROP FOREIGN KEY \`fk_chat_flow_workspaceId\`; + `) + + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`tool\` + DROP INDEX \`idx_tool_workspaceId\`, + DROP FOREIGN KEY \`fk_tool_workspaceId\`; + `) + + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`assistant\` + DROP INDEX \`idx_assistant_workspaceId\`, + DROP FOREIGN KEY \`fk_assistant_workspaceId\`; + `) + + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`credential\` + DROP INDEX \`idx_credential_workspaceId\`, + DROP FOREIGN KEY \`fk_credential_workspaceId\`; + `) + + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`document_store\` + DROP INDEX \`idx_document_store_workspaceId\`, + DROP FOREIGN KEY \`fk_document_store_workspaceId\`; + `) + + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`evaluation\` + DROP INDEX \`idx_evaluation_workspaceId\`, + DROP FOREIGN KEY \`fk_evaluation_workspaceId\`; + `) + + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`evaluator\` + DROP INDEX \`idx_evaluator_workspaceId\`, + DROP FOREIGN KEY \`fk_evaluator_workspaceId\`; + `) + + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`dataset\` + DROP INDEX \`idx_dataset_workspaceId\`, + DROP FOREIGN KEY \`fk_dataset_workspaceId\`; + `) + + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`variable\` + DROP INDEX \`idx_variable_workspaceId\`, + DROP FOREIGN KEY \`fk_variable_workspaceId\`; + `) + + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`workspace_shared\` + DROP INDEX \`idx_workspace_shared_workspaceId\`, + DROP FOREIGN KEY 
\`fk_workspace_shared_workspaceId\`; + `) + + // step 1 - drop index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`custom_template\` + DROP INDEX \`idx_custom_template_workspaceId\`, + DROP FOREIGN KEY \`fk_custom_template_workspaceId\`; + `) + } +} diff --git a/packages/server/src/enterprise/database/migrations/mysql/1729133111652-LinkOrganizationId.ts b/packages/server/src/enterprise/database/migrations/mysql/1729133111652-LinkOrganizationId.ts new file mode 100644 index 00000000000..42ab342a7ab --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/mysql/1729133111652-LinkOrganizationId.ts @@ -0,0 +1,25 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class LinkOrganizationId1729133111652 implements MigrationInterface { + name = 'LinkOrganizationId1729133111652' + + public async up(queryRunner: QueryRunner): Promise { + // step 1 - add index and foreign key for organizationId + await queryRunner.query(` + ALTER TABLE \`workspace\` + ADD INDEX \`idx_workspace_organizationId\` (\`organizationId\`), + ADD CONSTRAINT \`fk_workspace_organizationId\` + FOREIGN KEY (\`organizationId\`) + REFERENCES \`organization\`(\`id\`); + `) + } + + public async down(queryRunner: QueryRunner): Promise { + // step 1 - drop index and foreign key for organizationId + await queryRunner.query(` + ALTER TABLE \`workspace\` + DROP INDEX \`idx_workspace_organizationId\`, + DROP FOREIGN KEY \`fk_workspace_organizationId\`; + `) + } +} diff --git a/packages/server/src/enterprise/database/migrations/mysql/1730519457880-AddSSOColumns.ts b/packages/server/src/enterprise/database/migrations/mysql/1730519457880-AddSSOColumns.ts new file mode 100644 index 00000000000..c94d29f91da --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/mysql/1730519457880-AddSSOColumns.ts @@ -0,0 +1,16 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { ensureColumnExists } from './mysqlCustomFunctions' + +export class AddSSOColumns1730519457880 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await ensureColumnExists(queryRunner, 'organization', 'sso_config', 'text') + await ensureColumnExists(queryRunner, 'user', 'user_type', 'varchar(10)') + await ensureColumnExists(queryRunner, 'login_activity', 'login_mode', 'varchar(25)') + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "organization" DROP COLUMN "sso_config";`) + await queryRunner.query(`ALTER TABLE "user" DROP COLUMN "user_type";`) + await queryRunner.query(`ALTER TABLE "login_activity" DROP COLUMN "login_mode";`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/mysql/1734074497540-AddPersonalWorkspace.ts b/packages/server/src/enterprise/database/migrations/mysql/1734074497540-AddPersonalWorkspace.ts new file mode 100644 index 00000000000..0a27257793c --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/mysql/1734074497540-AddPersonalWorkspace.ts @@ -0,0 +1,29 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { v4 as uuidv4 } from 'uuid' + +export class AddPersonalWorkspace1734074497540 implements MigrationInterface { + name = 'AddPersonalWorkspace1734074497540' + + public async up(queryRunner: QueryRunner): Promise { + const users = await queryRunner.query(`select * from \`user\`;`) + const organization = await queryRunner.query(`select \`id\` from \`organization\`;`) + for (let user of users) { + const workspaceDescription = 
'Personal Workspace of ' + user.id + const workspaceId = uuidv4() + + await queryRunner.query(` + insert into \`workspace\` (\`id\`, \`name\`, \`description\`, \`organizationId\`) + values('${workspaceId}', 'Personal Workspace', '${workspaceDescription}', '${organization[0].id}'); + `) + + const workspaceUsersId = uuidv4() + + await queryRunner.query(` + insert into \`workspace_users\` (\`id\`, \`workspaceId\`, \`userId\`, \`role\`) + values('${workspaceUsersId}', '${workspaceId}', '${user.id}', 'pw'); + `) + } + } + + public async down(): Promise {} +} diff --git a/packages/server/src/enterprise/database/migrations/mysql/1737076223692-RefactorEnterpriseDatabase.ts b/packages/server/src/enterprise/database/migrations/mysql/1737076223692-RefactorEnterpriseDatabase.ts new file mode 100644 index 00000000000..e68db6731df --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/mysql/1737076223692-RefactorEnterpriseDatabase.ts @@ -0,0 +1,494 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { decrypt, encrypt } from '../../../utils/encryption.util' +import { LoginMethodStatus } from '../../entities/login-method.entity' +import { OrganizationUserStatus } from '../../entities/organization-user.entity' +import { OrganizationName } from '../../entities/organization.entity' +import { GeneralRole } from '../../entities/role.entity' +import { UserStatus } from '../../entities/user.entity' +import { WorkspaceUserStatus } from '../../entities/workspace-user.entity' +import { WorkspaceName } from '../../entities/workspace.entity' + +export class RefactorEnterpriseDatabase1737076223692 implements MigrationInterface { + name = 'RefactorEnterpriseDatabase1737076223692' + private async modifyTable(queryRunner: QueryRunner): Promise { + /*------------------------------------- + --------------- user ----------------- + --------------------------------------*/ + // rename user table to temp_user + await queryRunner.query(`alter table \`user\` rename to \`temp_user\`;`) + + // create user table + await queryRunner.query(` + create table \`user\` ( + \`id\` varchar(36) default (uuid()) primary key, + \`name\` varchar(100) not null, + \`email\` varchar(255) not null unique, + \`credential\` text null, + \`tempToken\` text null, + \`tokenExpiry\` timestamp null, + \`status\` varchar(20) default '${UserStatus.UNVERIFIED}' not null, + \`createdDate\` timestamp default now() not null, + \`updatedDate\` timestamp default now() not null, + \`createdBy\` varchar(36) not null, + \`updatedBy\` varchar(36) not null, + constraint \`fk_user_createdBy\` foreign key (\`createdBy\`) references \`user\` (\`id\`), + constraint \`fk_user_updatedBy\` foreign key (\`updatedBy\`) references \`user\` (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci; + `) + + /*------------------------------------- + ----------- organization -------------- + --------------------------------------*/ + // rename organization table to temp_organization + await queryRunner.query(`alter table \`organization\` rename to \`temp_organization\`;`) + + // create organization table + await queryRunner.query(` + create table \`organization\` ( + \`id\` varchar(36) default (uuid()) primary key, + \`name\` varchar(100) default '${OrganizationName.DEFAULT_ORGANIZATION}' not null, + \`customerId\` varchar(100) null, + \`subscriptionId\` varchar(100) null, + \`createdDate\` timestamp default now() not null, + \`updatedDate\` timestamp default now() not null, + \`createdBy\` varchar(36) not null, + 
\`updatedBy\` varchar(36) not null, + constraint \`fk_organization_createdBy\` foreign key (\`createdBy\`) references \`user\` (\`id\`), + constraint \`fk_organization_updatedBy\` foreign key (\`updatedBy\`) references \`user\` (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci; + `) + + /*------------------------------------- + ----------- login method -------------- + --------------------------------------*/ + // create login_method table + await queryRunner.query(` + create table \`login_method\` ( + \`id\` varchar(36) default (uuid()) primary key, + \`organizationId\` varchar(36) null, + \`name\` varchar(100) not null, + \`config\` text not null, + \`status\` varchar(20) default '${LoginMethodStatus.ENABLE}' not null, + \`createdDate\` timestamp default now() not null, + \`updatedDate\` timestamp default now() not null, + \`createdBy\` varchar(36) null, + \`updatedBy\` varchar(36) null, + constraint \`fk_login_method_organizationId\` foreign key (\`organizationId\`) references \`organization\` (\`id\`), + constraint \`fk_login_method_createdBy\` foreign key (\`createdBy\`) references \`user\` (\`id\`), + constraint \`fk_login_method_updatedBy\` foreign key (\`updatedBy\`) references \`user\` (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci; + `) + + /*------------------------------------- + --------------- role ------------------ + --------------------------------------*/ + // rename roles table to temp_role + await queryRunner.query(`alter table \`roles\` rename to \`temp_role\`;`) + + // create organization_login_method table + await queryRunner.query(` + create table \`role\` ( + \`id\` varchar(36) default (uuid()) primary key, + \`organizationId\` varchar(36) null, + \`name\` varchar(100) not null, + \`description\` text null, + \`permissions\` text not null, + \`createdDate\` timestamp default now() not null, + \`updatedDate\` timestamp default now() not null, + \`createdBy\` varchar(36) null, + \`updatedBy\` varchar(36) null, + constraint \`fk_role_organizationId\` foreign key (\`organizationId\`) references \`organization\` (\`id\`), + constraint \`fk_role_createdBy\` foreign key (\`createdBy\`) references \`user\` (\`id\`), + constraint \`fk_role_updatedBy\` foreign key (\`updatedBy\`) references \`user\` (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci; + `) + + /*------------------------------------- + ---------- organization_user ---------- + --------------------------------------*/ + // create organization_user table + await queryRunner.query(` + create table \`organization_user\` ( + \`organizationId\` varchar(36) not null, + \`userId\` varchar(36) not null, + \`roleId\` varchar(36) not null, + \`status\` varchar(20) default '${OrganizationUserStatus.ACTIVE}' not null, + \`createdDate\` timestamp default now() not null, + \`updatedDate\` timestamp default now() not null, + \`createdBy\` varchar(36) not null, + \`updatedBy\` varchar(36) not null, + constraint \`pk_organization_user\` primary key (\`organizationId\`, \`userId\`), + constraint \`fk_organization_user_organizationId\` foreign key (\`organizationId\`) references \`organization\` (\`id\`), + constraint \`fk_organization_user_userId\` foreign key (\`userId\`) references \`user\` (\`id\`), + constraint \`fk_organization_user_roleId\` foreign key (\`roleId\`) references \`role\` (\`id\`), + constraint \`fk_organization_user_createdBy\` foreign key (\`createdBy\`) references \`user\` (\`id\`), + constraint 
\`fk_organization_user_updatedBy\` foreign key (\`updatedBy\`) references \`user\` (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci; + `) + + /*------------------------------------- + ------------- workspace --------------- + --------------------------------------*/ + // modify workspace table + await queryRunner.query(` + alter table \`workspace\` + drop constraint \`fk_workspace_organizationId\`; + `) + + await queryRunner.query(` + alter table \`workspace\` + modify column \`organizationId\` varchar(36) not null, + modify column \`name\` varchar(100), + modify column \`description\` text; + `) + + await queryRunner.query(` + alter table \`workspace\` + add column \`createdBy\` varchar(36) null, + add column \`updatedBy\` varchar(36) null; + `) + + // remove first if needed will be add back, will cause insert to slow + await queryRunner.query(` + drop index \`idx_workspace_organizationId\` on \`workspace\`; + `) + + /*------------------------------------- + ----------- workspace_user ------------ + --------------------------------------*/ + // rename workspace_users table to temp_workspace_user + await queryRunner.query(`alter table \`workspace_users\` rename to \`temp_workspace_user\`;`) + + // create workspace_user table + await queryRunner.query(` + create table \`workspace_user\` ( + \`workspaceId\` varchar(36) not null, + \`userId\` varchar(36) not null, + \`roleId\` varchar(36) not null, + \`status\` varchar(20) default '${WorkspaceUserStatus.INVITED}' not null, + \`lastLogin\` timestamp null, + \`createdDate\` timestamp default now() not null, + \`updatedDate\` timestamp default now() not null, + \`createdBy\` varchar(36) not null, + \`updatedBy\` varchar(36) not null, + constraint \`pk_workspace_user\` primary key (\`workspaceId\`, \`userId\`), + constraint \`fk_workspace_user_workspaceId\` foreign key (\`workspaceId\`) references \`workspace\` (\`id\`), + constraint \`fk_workspace_user_userId\` foreign key (\`userId\`) references \`user\` (\`id\`), + constraint \`fk_workspace_user_roleId\` foreign key (\`roleId\`) references \`role\` (\`id\`), + constraint \`fk_workspace_user_createdBy\` foreign key (\`createdBy\`) references \`user\` (\`id\`), + constraint \`fk_workspace_user_updatedBy\` foreign key (\`updatedBy\`) references \`user\` (\`id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci; + `) + } + + private async deleteWorkspaceWithoutUser(queryRunner: QueryRunner) { + const workspaceWithoutUser = await queryRunner.query(` + select w.\`id\` as \`id\` from \`workspace_user\` as \`wu\` + right join \`workspace\` as \`w\` on \`wu\`.\`workspaceId\` = \`w\`.\`id\` + where \`wu\`.\`userId\` is null; + `) + const workspaceIds = workspaceWithoutUser.map((workspace: { id: string }) => `'${workspace.id}'`).join(',') + + // Delete related records from other tables that reference the deleted workspaces + if (workspaceIds && workspaceIds.length > 0) { + await queryRunner.query(` + delete from \`workspace_user\` where \`workspaceId\` in (${workspaceIds}); + `) + await queryRunner.query(` + delete from \`apikey\` where \`workspaceId\` in (${workspaceIds}); + `) + await queryRunner.query(` + delete from \`assistant\` where \`workspaceId\` in (${workspaceIds}); + `) + const chatflows = await queryRunner.query(` + select id from \`chat_flow\` where \`workspaceId\` in (${workspaceIds}); + `) + const chatflowIds = chatflows.map((chatflow: { id: string }) => `'${chatflow.id}'`).join(',') + if (chatflowIds && chatflowIds.length > 0) { + await 
queryRunner.query(` + delete from \`chat_flow\` where \`workspaceId\` in (${workspaceIds}); + `) + await queryRunner.query(` + delete from \`upsert_history\` where \`chatflowid\` in (${chatflowIds}); + `) + await queryRunner.query(` + delete from \`chat_message\` where \`chatflowid\` in (${chatflowIds}); + `) + await queryRunner.query(` + delete from \`chat_message_feedback\` where \`chatflowid\` in (${chatflowIds}); + `) + } + await queryRunner.query(` + delete from \`credential\` where \`workspaceId\` in (${workspaceIds}); + `) + await queryRunner.query(` + delete from \`custom_template\` where \`workspaceId\` in (${workspaceIds}); + `) + const datasets = await queryRunner.query(` + select id from \`dataset\` where \`workspaceId\` in (${workspaceIds}); + `) + const datasetIds = datasets.map((dataset: { id: string }) => `'${dataset.id}'`).join(',') + if (datasetIds && datasetIds.length > 0) { + await queryRunner.query(` + delete from \`dataset\` where \`workspaceId\` in (${workspaceIds}); + `) + await queryRunner.query(` + delete from \`dataset_row\` where \`datasetId\` in (${datasetIds}); + `) + } + const documentStores = await queryRunner.query(` + select id from \`document_store\` where \`workspaceId\` in (${workspaceIds}); + `) + const documentStoreIds = documentStores.map((documentStore: { id: string }) => `'${documentStore.id}'`).join(',') + if (documentStoreIds && documentStoreIds.length > 0) { + await queryRunner.query(` + delete from \`document_store\` where \`workspaceId\` in (${workspaceIds}); + `) + await queryRunner.query(` + delete from \`document_store_file_chunk\` where \`storeId\` in (${documentStoreIds}); + `) + } + const evaluations = await queryRunner.query(` + select id from \`evaluation\` where \`workspaceId\` in (${workspaceIds}); + `) + const evaluationIds = evaluations.map((evaluation: { id: string }) => `'${evaluation.id}'`).join(',') + if (evaluationIds && evaluationIds.length > 0) { + await queryRunner.query(` + delete from \`evaluation\` where \`workspaceId\` in (${workspaceIds}); + `) + await queryRunner.query(` + delete from \`evaluation_run\` where \`evaluationId\` in (${evaluationIds}); + `) + } + await queryRunner.query(` + delete from \`evaluator\` where \`workspaceId\` in (${workspaceIds}); + `) + await queryRunner.query(` + delete from \`tool\` where \`workspaceId\` in (${workspaceIds}); + `) + await queryRunner.query(` + delete from \`variable\` where \`workspaceId\` in (${workspaceIds}); + `) + await queryRunner.query(` + delete from \`workspace_shared\` where \`workspaceId\` in (${workspaceIds}); + `) + await queryRunner.query(` + delete from \`workspace\` where \`id\` in (${workspaceIds}); + `) + } + } + + private async populateTable(queryRunner: QueryRunner): Promise { + // insert generalRole + const generalRole = [ + { + name: 'owner', + description: 'Has full control over the organization.', + permissions: '["organization","workspace"]' + }, + { + name: 'member', + description: 'Has limited control over the organization.', + permissions: '[]' + }, + { + name: 'personal workspace', + description: 'Has full control over the personal workspace', + permissions: + '[ "chatflows:view", "chatflows:create", "chatflows:update", "chatflows:duplicate", "chatflows:delete", "chatflows:export", "chatflows:import", "chatflows:config", "chatflows:domains", "agentflows:view", "agentflows:create", "agentflows:update", "agentflows:duplicate", "agentflows:delete", "agentflows:export", "agentflows:import", "agentflows:config", "agentflows:domains", "tools:view", 
"tools:create", "tools:update", "tools:delete", "tools:export", "assistants:view", "assistants:create", "assistants:update", "assistants:delete", "credentials:view", "credentials:create", "credentials:update", "credentials:delete", "credentials:share", "variables:view", "variables:create", "variables:update", "variables:delete", "apikeys:view", "apikeys:create", "apikeys:update", "apikeys:delete", "apikeys:import", "documentStores:view", "documentStores:create", "documentStores:update", "documentStores:delete", "documentStores:add-loader", "documentStores:delete-loader", "documentStores:preview-process", "documentStores:upsert-config", "datasets:view", "datasets:create", "datasets:update", "datasets:delete", "evaluators:view", "evaluators:create", "evaluators:update", "evaluators:delete", "evaluations:view", "evaluations:create", "evaluations:update", "evaluations:delete", "evaluations:run", "templates:marketplace", "templates:custom", "templates:custom-delete", "templates:toolexport", "templates:flowexport", "templates:custom-share", "workspace:export", "workspace:import", "executions:view", "executions:delete" ]' + } + ] + for (let role of generalRole) { + await queryRunner.query(` + insert into \`role\`(\`name\`, \`description\`, \`permissions\`) + values('${role.name}', '${role.description}', '${role.permissions}'); + `) + } + + const users = await queryRunner.query('select * from `temp_user`;') + const noExistingData = users.length > 0 === false + if (noExistingData) return + + const organizations = await queryRunner.query('select * from `temp_organization`;') + const organizationId = organizations[0].id + const adminUserId = organizations[0].adminUserId + const ssoConfig = organizations[0].sso_config ? JSON.parse(await decrypt(organizations[0].sso_config)).providers : [] + + /*------------------------------------- + --------------- user ----------------- + --------------------------------------*/ + // insert admin user first + await queryRunner.query(` + insert into \`user\` (\`id\`, \`name\`, \`email\`, \`credential\`, \`tempToken\`, \`tokenExpiry\`, \`status\`, \`createdBy\`, \`updatedBy\`) + select tu.\`id\`, coalesce(tu.\`name\`, tu.\`email\`), tu.\`email\`, tu.\`credential\`, tu.\`tempToken\`, tu.\`tokenExpiry\`, tu.\`status\`, + '${adminUserId}', '${adminUserId}' + from \`temp_user\` as \`tu\` where tu.\`id\` = '${adminUserId}'; + `) + + // insert user with temp_user data + await queryRunner.query(` + insert into \`user\` (\`id\`, \`name\`, \`email\`, \`credential\`, \`tempToken\`, \`tokenExpiry\`, \`status\`, \`createdBy\`, \`updatedBy\`) + select tu.\`id\`, coalesce(tu.\`name\`, tu.\`email\`), tu.\`email\`, tu.\`credential\`, tu.\`tempToken\`, tu.\`tokenExpiry\`, tu.\`status\`, + '${adminUserId}', '${adminUserId}' + from \`temp_user\` as \`tu\` where tu.\`id\` != '${adminUserId}'; + `) + + /*------------------------------------- + ----------- organization -------------- + --------------------------------------*/ + // insert organization with temp_organization data + await queryRunner.query(` + insert into \`organization\` (\`id\`, \`name\`, \`createdBy\`, \`updatedBy\`) + select \`id\`, \`name\`, \`adminUserId\`, \`adminUserId\` from \`temp_organization\`; + `) + + /*------------------------------------- + ----------- login method -------------- + --------------------------------------*/ + // insert login_method with temp_organization data + for (let config of ssoConfig) { + const newConfigFormat = { + domain: config.domain === '' || config.domain === undefined ? 
undefined : config.domain, + tenantID: config.tenantID === '' || config.tenantID === undefined ? undefined : config.tenantID, + clientID: config.clientID === '' || config.clientID === undefined ? undefined : config.clientID, + clientSecret: config.clientSecret === '' || config.clientSecret === undefined ? undefined : config.clientSecret + } + const status = config.configEnabled === true ? LoginMethodStatus.ENABLE : LoginMethodStatus.DISABLE + + const allUndefined = Object.values(newConfigFormat).every((value) => value === undefined) + if (allUndefined && status === LoginMethodStatus.DISABLE) continue + const encryptData = await encrypt(JSON.stringify(newConfigFormat)) + + await queryRunner.query(` + insert into \`login_method\` (\`organizationId\`, \`name\`, \`config\`, \`status\`, \`createdBy\`, \`updatedBy\`) + values('${organizationId}','${config.providerName}','${encryptData}','${status}','${adminUserId}','${adminUserId}'); + `) + } + + /*------------------------------------- + --------------- role ------------------ + --------------------------------------*/ + // insert workspace role into role + const workspaceRole = await queryRunner.query(`select \`id\`, \`name\`, \`description\`, \`permissions\` from \`temp_role\`;`) + for (let role of workspaceRole) { + role.permissions = JSON.stringify(role.permissions.split(',').filter((permission: string) => permission.trim() !== '')) + const haveDescriptionQuery = `insert into \`role\` (\`id\`, \`organizationId\`, \`name\`, \`description\`, \`permissions\`, \`createdBy\`, \`updatedBy\`) + values('${role.id}','${organizationId}','${role.name}','${role.description}','${role.permissions}','${adminUserId}','${adminUserId}');` + const noHaveDescriptionQuery = `insert into \`role\` (\`id\`, \`organizationId\`, \`name\`, \`permissions\`, \`createdBy\`, \`updatedBy\`) + values('${role.id}','${organizationId}','${role.name}','${role.permissions}','${adminUserId}','${adminUserId}');` + const insertRoleQuery = role.description ? haveDescriptionQuery : noHaveDescriptionQuery + await queryRunner.query(insertRoleQuery) + } + + /*------------------------------------- + ---------- organization_user ---------- + --------------------------------------*/ + const roles = await queryRunner.query('select * from `role`;') + // insert organization_user with user, role and temp_organization data + for (let user of users) { + const roleId = + user.id === adminUserId + ? 
roles.find((role: any) => role.name === GeneralRole.OWNER).id + : roles.find((role: any) => role.name === GeneralRole.MEMBER).id + await queryRunner.query(` + insert into \`organization_user\` (\`organizationId\`, \`userId\`, \`roleId\`, \`status\`, \`createdBy\`, \`updatedBy\`) + values ('${organizationId}','${user.id}','${roleId}','${user.status}','${adminUserId}','${adminUserId}'); + `) + } + + /*------------------------------------- + ------------- workspace --------------- + --------------------------------------*/ + const workspaces = await queryRunner.query('select * from `workspace`;') + for (let workspace of workspaces) { + await queryRunner.query( + `update \`workspace\` set \`createdBy\` = '${adminUserId}', \`updatedBy\` = '${adminUserId}' where \`id\` = '${workspace.id}';` + ) + } + + /*------------------------------------- + ----------- workspace_user ------------ + --------------------------------------*/ + const workspaceUsers = await queryRunner.query('select * from `temp_workspace_user`;') + for (let workspaceUser of workspaceUsers) { + switch (workspaceUser.role) { + case 'org_admin': + workspaceUser.role = roles.find((role: any) => role.name === GeneralRole.OWNER).id + break + case 'pw': + workspaceUser.role = roles.find((role: any) => role.name === GeneralRole.PERSONAL_WORKSPACE).id + break + default: + workspaceUser.role = roles.find((role: any) => role.name === workspaceUser.role).id + break + } + const user = users.find((user: any) => user.id === workspaceUser.userId) + const workspace = workspaces.find((workspace: any) => workspace.id === workspaceUser.workspaceId) + if (workspaceUser.workspaceId === user.activeWorkspaceId && user.lastLogin) { + const lastLogin = new Date(user.lastLogin).toISOString().replace('T', ' ').slice(0, 19) + await queryRunner.query(` + insert into \`workspace_user\` (\`workspaceId\`, \`userId\`, \`roleId\`, \`status\`, \`lastLogin\`,\`createdBy\`, \`updatedBy\`) + values ('${workspaceUser.workspaceId}','${workspaceUser.userId}','${workspaceUser.role}','${WorkspaceUserStatus.ACTIVE}','${lastLogin}','${adminUserId}','${adminUserId}'); + `) + } else if (workspace.name === WorkspaceName.DEFAULT_PERSONAL_WORKSPACE && !user.lastLogin) { + // Skip personal workspaces for users who haven't signed up yet to avoid duplicates when they sign up. + // account.service.ts creates personal workspace during sign-up. 
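A note on the `lastLogin` handling just above: MySQL's TIMESTAMP column does not accept the raw ISO-8601 string returned by `toISOString()`, so the migration reshapes it into `YYYY-MM-DD HH:MM:SS` before interpolating it into the INSERT. A minimal standalone sketch of that conversion (the helper name is illustrative, not part of the migration):

```ts
// Reshape an ISO-8601 timestamp into the 'YYYY-MM-DD HH:MM:SS' form MySQL accepts:
// drop the 'T' separator and trim the milliseconds/timezone suffix.
const toMySqlTimestamp = (value: string | Date): string =>
    new Date(value).toISOString().replace('T', ' ').slice(0, 19)

// toMySqlTimestamp('2024-05-01T09:30:15.123Z') === '2024-05-01 09:30:15'
```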
+ await queryRunner.query(` + delete from \`temp_workspace_user\` where \`workspaceId\` = '${workspaceUser.workspaceId}' and \`userId\` = '${workspaceUser.userId}'; + `) + await queryRunner.query(` + delete from \`workspace\` where \`id\` = '${workspaceUser.workspaceId}'; + `) + } else { + await queryRunner.query(` + insert into \`workspace_user\` (\`workspaceId\`, \`userId\`, \`roleId\`, \`status\`,\`createdBy\`, \`updatedBy\`) + values ('${workspaceUser.workspaceId}','${workspaceUser.userId}','${workspaceUser.role}','${WorkspaceUserStatus.INVITED}','${adminUserId}','${adminUserId}'); + `) + } + } + + await this.deleteWorkspaceWithoutUser(queryRunner) + } + + private async deleteTempTable(queryRunner: QueryRunner): Promise { + await queryRunner.query(` + drop table \`temp_workspace_user\`; + `) + await queryRunner.query(` + drop table \`temp_role\`; + `) + await queryRunner.query(` + drop table \`temp_organization\`; + `) + await queryRunner.query(` + drop table \`temp_user\`; + `) + } + + public async up(queryRunner: QueryRunner): Promise { + await this.modifyTable(queryRunner) + await this.populateTable(queryRunner) + await this.deleteTempTable(queryRunner) + + // This query cannot be part of the modifyTable function because: + // 1. The \`organizationId\` in the \`workspace\` table might be referencing data in the \`temp_organization\` table, so it must be altered last. + // 2. Setting \`createdBy\` and \`updatedBy\` to NOT NULL needs to happen after ensuring there’s no existing data that would violate the constraint, + // because altering these columns while there is data could prevent new records from being inserted into the \`workspace\` table. + await queryRunner.query(` + alter table \`workspace\` + modify column \`createdBy\` varchar(36) not null, + modify column \`updatedBy\` varchar(36) not null, + add constraint \`fk_organizationId\` foreign key (\`organizationId\`) references \`organization\` (\`id\`), + add constraint \`fk_workspace_createdBy\` foreign key (\`createdBy\`) references \`user\` (\`id\`), + add constraint \`fk_workspace_updatedBy\` foreign key (\`updatedBy\`) references \`user\` (\`id\`); + `) + + // modify evaluation table for average_metrics column to be nullable + await queryRunner.query(` + alter table \`evaluation\` + modify column \`average_metrics\` longtext null; + `) + } + + public async down(): Promise {} +} diff --git a/packages/server/src/enterprise/database/migrations/mysql/1746862866554-ExecutionLinkWorkspaceId.ts b/packages/server/src/enterprise/database/migrations/mysql/1746862866554-ExecutionLinkWorkspaceId.ts new file mode 100644 index 00000000000..12f00e40672 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/mysql/1746862866554-ExecutionLinkWorkspaceId.ts @@ -0,0 +1,30 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { ensureColumnExists } from './mysqlCustomFunctions' + +export class ExecutionLinkWorkspaceId1746862866554 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + // step 1 - add workspaceId column + await ensureColumnExists(queryRunner, 'execution', 'workspaceId', 'varchar(36)') + + // step 2 - add index and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`execution\` + ADD INDEX \`idx_execution_workspaceId\` (\`workspaceId\`), + ADD CONSTRAINT \`fk_execution_workspaceId\` + FOREIGN KEY (\`workspaceId\`) + REFERENCES \`workspace\`(\`id\`); + `) + } + + public async down(queryRunner: QueryRunner): Promise { + // step 1 - drop index 
and foreign key for workspaceId + await queryRunner.query(` + ALTER TABLE \`execution\` + DROP INDEX \`idx_execution_workspaceId\`, + DROP FOREIGN KEY \`fk_execution_workspaceId\`; + `) + + // step 2 - drop workspaceId column + await queryRunner.query(`ALTER TABLE \`execution\` DROP COLUMN \`workspaceId\`;`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/mysql/mysqlCustomFunctions.ts b/packages/server/src/enterprise/database/migrations/mysql/mysqlCustomFunctions.ts new file mode 100644 index 00000000000..05bc715bd45 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/mysql/mysqlCustomFunctions.ts @@ -0,0 +1,26 @@ +import { QueryRunner } from 'typeorm' + +export const ensureColumnExists = async ( + queryRunner: QueryRunner, + tableName: string, + columnName: string, + columnType: string // Accept column type as a parameter +): Promise<void> => { + // Check if the specified column exists in the given table + const columnCheck = await queryRunner.query( + ` + SELECT COLUMN_NAME + FROM information_schema.COLUMNS + WHERE TABLE_NAME = ? AND COLUMN_NAME = ? AND TABLE_SCHEMA = ? + `, + [tableName, columnName, queryRunner.connection.options.database] + ) + + // Check if the column exists + const columnExists = columnCheck.length > 0 + + if (!columnExists) { + // Add the column if it does not exist + await queryRunner.query(`ALTER TABLE ${tableName} ADD COLUMN ${columnName} ${columnType};`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/postgres/1720230151482-AddAuthTables.ts b/packages/server/src/enterprise/database/migrations/postgres/1720230151482-AddAuthTables.ts new file mode 100644 index 00000000000..071b97efe89 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/postgres/1720230151482-AddAuthTables.ts @@ -0,0 +1,44 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddAuthTables1720230151482 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise<void> { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS "user" ( + id uuid NOT NULL DEFAULT uuid_generate_v4(), + "name" varchar, + "role" varchar NOT NULL, + "credential" text, + "tempToken" text, + "tokenExpiry" timestamp, + "email" varchar NOT NULL, + "status" varchar NOT NULL, + "activeWorkspaceId" varchar, + "lastLogin" timestamp, + CONSTRAINT "PK_98455643dd334f54-9830ab78f9" PRIMARY KEY (id) + );` + ) + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS "roles" ( + id uuid NOT NULL DEFAULT uuid_generate_v4(), + "name" varchar, + "description" varchar, + "permissions" text, + CONSTRAINT "PK_98488643dd3554f54-9830ab78f9" PRIMARY KEY (id) + );` + ) + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS "login_activity" ( + "id" uuid NOT NULL DEFAULT uuid_generate_v4(), + "username" varchar NOT NULL, + "activity_code" integer NOT NULL, + "message" varchar NOT NULL, + "attemptedDateTime" timestamp NOT NULL DEFAULT now());` + ) + } + + public async down(queryRunner: QueryRunner): Promise<void> { + await queryRunner.query(`DROP TABLE "user"`) + await queryRunner.query(`DROP TABLE "roles"`) + await queryRunner.query(`DROP TABLE "login_activity"`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/postgres/1720230151484-AddWorkspace.ts b/packages/server/src/enterprise/database/migrations/postgres/1720230151484-AddWorkspace.ts new file mode 100644 index 00000000000..1bd4dac3269 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/postgres/1720230151484-AddWorkspace.ts @@ -0,0 +1,52 @@
+import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddWorkspace1720230151484 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS workspace ( + id uuid NOT NULL DEFAULT uuid_generate_v4(), + "name" varchar NOT NULL, + "description" varchar NULL, + "createdDate" timestamp NOT NULL DEFAULT now(), + "updatedDate" timestamp NOT NULL DEFAULT now(), + CONSTRAINT "PK_98719043dd804f55-9830ab99f8" PRIMARY KEY (id) + );` + ) + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS workspace_users ( + id uuid NOT NULL DEFAULT uuid_generate_v4(), + "workspaceId" varchar NOT NULL, + "userId" varchar NOT NULL, + "role" varchar NULL, + CONSTRAINT "PK_98718943dd804f55-9830ab99f8" PRIMARY KEY (id) + );` + ) + + await queryRunner.query(`ALTER TABLE "chat_flow" ADD COLUMN IF NOT EXISTS "workspaceId" varchar;`) + await queryRunner.query(`ALTER TABLE "tool" ADD COLUMN IF NOT EXISTS "workspaceId" varchar;`) + await queryRunner.query(`ALTER TABLE "assistant" ADD COLUMN IF NOT EXISTS "workspaceId" varchar;`) + await queryRunner.query(`ALTER TABLE "credential" ADD COLUMN IF NOT EXISTS "workspaceId" varchar;`) + await queryRunner.query(`ALTER TABLE "document_store" ADD COLUMN IF NOT EXISTS "workspaceId" varchar;`) + await queryRunner.query(`ALTER TABLE "evaluation" ADD COLUMN IF NOT EXISTS "workspaceId" varchar;`) + await queryRunner.query(`ALTER TABLE "evaluator" ADD COLUMN IF NOT EXISTS "workspaceId" varchar;`) + await queryRunner.query(`ALTER TABLE "dataset" ADD COLUMN IF NOT EXISTS "workspaceId" varchar;`) + await queryRunner.query(`ALTER TABLE "apikey" ADD COLUMN IF NOT EXISTS "workspaceId" varchar;`) + await queryRunner.query(`ALTER TABLE "variable" ADD COLUMN IF NOT EXISTS "workspaceId" varchar;`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE workspace`) + await queryRunner.query(`DROP TABLE workspace_users`) + + await queryRunner.query(`ALTER TABLE "chat_flow" DROP COLUMN "workspaceId";`) + await queryRunner.query(`ALTER TABLE "tool" DROP COLUMN "workspaceId";`) + await queryRunner.query(`ALTER TABLE "assistant" DROP COLUMN "workspaceId";`) + await queryRunner.query(`ALTER TABLE "credential" DROP COLUMN "workspaceId";`) + await queryRunner.query(`ALTER TABLE "document_store" DROP COLUMN "workspaceId";`) + await queryRunner.query(`ALTER TABLE "evaluation" DROP COLUMN "workspaceId";`) + await queryRunner.query(`ALTER TABLE "evaluator" DROP COLUMN "workspaceId";`) + await queryRunner.query(`ALTER TABLE "dataset" DROP COLUMN "workspaceId";`) + await queryRunner.query(`ALTER TABLE "apikey" DROP COLUMN "workspaceId";`) + await queryRunner.query(`ALTER TABLE "variable" DROP COLUMN "workspaceId";`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/postgres/1726654922034-AddWorkspaceShared.ts b/packages/server/src/enterprise/database/migrations/postgres/1726654922034-AddWorkspaceShared.ts new file mode 100644 index 00000000000..b1c6b2ef068 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/postgres/1726654922034-AddWorkspaceShared.ts @@ -0,0 +1,21 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddWorkspaceShared1726654922034 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS "workspace_shared" ( + id uuid NOT NULL DEFAULT uuid_generate_v4(), + "workspaceId" varchar NOT NULL, + 
"sharedItemId" varchar NOT NULL, + "itemType" varchar NOT NULL, + "createdDate" timestamp NOT NULL DEFAULT now(), + "updatedDate" timestamp NOT NULL DEFAULT now(), + CONSTRAINT "PK_90016043dd804f55-9830ab97f8" PRIMARY KEY (id) + );` + ) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE workspace_shared`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/postgres/1726655750383-AddWorkspaceIdToCustomTemplate.ts b/packages/server/src/enterprise/database/migrations/postgres/1726655750383-AddWorkspaceIdToCustomTemplate.ts new file mode 100644 index 00000000000..f15338eca8f --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/postgres/1726655750383-AddWorkspaceIdToCustomTemplate.ts @@ -0,0 +1,11 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddWorkspaceIdToCustomTemplate1726655750383 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "custom_template" ADD COLUMN IF NOT EXISTS "workspaceId" varchar;`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "custom_template" DROP COLUMN "workspaceId";`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/postgres/1727798417345-AddOrganization.ts b/packages/server/src/enterprise/database/migrations/postgres/1727798417345-AddOrganization.ts new file mode 100644 index 00000000000..571d23da956 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/postgres/1727798417345-AddOrganization.ts @@ -0,0 +1,25 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddOrganization1727798417345 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS organization ( + id uuid NOT NULL DEFAULT uuid_generate_v4(), + "name" varchar NOT NULL, + "adminUserId" varchar NULL, + "defaultWsId" varchar NULL, + "organization_type" varchar NULL, + "createdDate" timestamp NOT NULL DEFAULT now(), + "updatedDate" timestamp NOT NULL DEFAULT now(), + CONSTRAINT "PK_99619041dd804f00-9830ab99f8" PRIMARY KEY (id) + );` + ) + await queryRunner.query(`ALTER TABLE "workspace" ADD COLUMN IF NOT EXISTS "organizationId" varchar;`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE organization`) + + await queryRunner.query(`ALTER TABLE "workspace" DROP COLUMN "organizationId";`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/postgres/1729130948686-LinkWorkspaceId.ts b/packages/server/src/enterprise/database/migrations/postgres/1729130948686-LinkWorkspaceId.ts new file mode 100644 index 00000000000..a31a045606e --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/postgres/1729130948686-LinkWorkspaceId.ts @@ -0,0 +1,429 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class LinkWorkspaceId1729130948686 implements MigrationInterface { + name = 'LinkWorkspaceId1729130948686' + + public async up(queryRunner: QueryRunner): Promise { + // step 1 - convert from varchar to UUID type + await queryRunner.query(` + ALTER TABLE "apikey" ALTER COLUMN "workspaceId" SET DATA TYPE UUID USING "workspaceId"::UUID; + `) + + // step 2 - add foreign key constraint + await queryRunner.query(` + ALTER TABLE "apikey" ADD CONSTRAINT "fk_apikey_workspaceId" FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id"); + `) + 
+ // step 3 - create index for workspaceId + await queryRunner.query(` + CREATE INDEX "idx_apikey_workspaceId" ON "apikey"("workspaceId"); + `) + + // step 1 - convert from varchar to UUID type + await queryRunner.query(` + ALTER TABLE "user" ALTER COLUMN "activeWorkspaceId" SET DATA TYPE UUID USING "activeWorkspaceId"::UUID; + `) + + // step 2 - add foreign key constraint + await queryRunner.query(` + ALTER TABLE "user" ADD CONSTRAINT "fk_user_activeWorkspaceId" FOREIGN KEY ("activeWorkspaceId") REFERENCES "workspace"("id"); + `) + + // step 3 - create index for activeWorkspaceId + await queryRunner.query(` + CREATE INDEX "idx_user_activeWorkspaceId" ON "user"("activeWorkspaceId"); + `) + + // step 1 - convert from varchar to UUID type + await queryRunner.query(` + ALTER TABLE "workspace_users" ALTER COLUMN "workspaceId" SET DATA TYPE UUID USING "workspaceId"::UUID; + `) + + // step 2 - add foreign key constraint + await queryRunner.query(` + ALTER TABLE "workspace_users" ADD CONSTRAINT "fk_workspace_users_workspaceId" FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id"); + `) + + // step 3 - create index for workspaceId + await queryRunner.query(` + CREATE INDEX "idx_workspace_users_workspaceId" ON "workspace_users"("workspaceId"); + `) + + // step 1 - convert from varchar to UUID type + await queryRunner.query(` + ALTER TABLE "chat_flow" ALTER COLUMN "workspaceId" SET DATA TYPE UUID USING "workspaceId"::UUID; + `) + + // step 2 - add foreign key constraint + await queryRunner.query(` + ALTER TABLE "chat_flow" ADD CONSTRAINT "fk_chat_flow_workspaceId" FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id"); + `) + + // step 3 - create index for workspaceId + await queryRunner.query(` + CREATE INDEX "idx_chat_flow_workspaceId" ON "chat_flow"("workspaceId"); + `) + + // step 1 - convert from varchar to UUID type + await queryRunner.query(` + ALTER TABLE "tool" ALTER COLUMN "workspaceId" SET DATA TYPE UUID USING "workspaceId"::UUID; + `) + + // step 2 - add foreign key constraint + await queryRunner.query(` + ALTER TABLE "tool" ADD CONSTRAINT "fk_tool_workspaceId" FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id"); + `) + + // step 3 - create index for workspaceId + await queryRunner.query(` + CREATE INDEX "idx_tool_workspaceId" ON "tool"("workspaceId"); + `) + + // step 1 - convert from varchar to UUID type + await queryRunner.query(` + ALTER TABLE "assistant" ALTER COLUMN "workspaceId" SET DATA TYPE UUID USING "workspaceId"::UUID; + `) + + // step 2 - add foreign key constraint + await queryRunner.query(` + ALTER TABLE "assistant" ADD CONSTRAINT "fk_assistant_workspaceId" FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id"); + `) + + // step 3 - create index for workspaceId + await queryRunner.query(` + CREATE INDEX "idx_assistant_workspaceId" ON "assistant"("workspaceId"); + `) + + // step 1 - convert from varchar to UUID type + await queryRunner.query(` + ALTER TABLE "credential" ALTER COLUMN "workspaceId" SET DATA TYPE UUID USING "workspaceId"::UUID; + `) + + // step 2 - add foreign key constraint + await queryRunner.query(` + ALTER TABLE "credential" ADD CONSTRAINT "fk_credential_workspaceId" FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id"); + `) + + // step 3 - create index for workspaceId + await queryRunner.query(` + CREATE INDEX "idx_credential_workspaceId" ON "credential"("workspaceId"); + `) + + // step 1 - convert from varchar to UUID type + await queryRunner.query(` + ALTER TABLE "document_store" ALTER COLUMN "workspaceId" SET DATA TYPE UUID USING 
"workspaceId"::UUID; + `) + + // step 2 - add foreign key constraint + await queryRunner.query(` + ALTER TABLE "document_store" ADD CONSTRAINT "fk_document_store_workspaceId" FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id"); + `) + + // step 3 - create index for workspaceId + await queryRunner.query(` + CREATE INDEX "idx_document_store_workspaceId" ON "document_store"("workspaceId"); + `) + + // step 1 - convert from varchar to UUID type + await queryRunner.query(` + ALTER TABLE "evaluation" ALTER COLUMN "workspaceId" SET DATA TYPE UUID USING "workspaceId"::UUID; + `) + + // step 2 - add foreign key constraint + await queryRunner.query(` + ALTER TABLE "evaluation" ADD CONSTRAINT "fk_evaluation_workspaceId" FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id"); + `) + + // step 3 - create index for workspaceId + await queryRunner.query(` + CREATE INDEX "idx_evaluation_workspaceId" ON "evaluation"("workspaceId"); + `) + + // step 1 - convert from varchar to UUID type + await queryRunner.query(` + ALTER TABLE "evaluator" ALTER COLUMN "workspaceId" SET DATA TYPE UUID USING "workspaceId"::UUID; + `) + + // step 2 - add foreign key constraint + await queryRunner.query(` + ALTER TABLE "evaluator" ADD CONSTRAINT "fk_evaluator_workspaceId" FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id"); + `) + + // step 3 - create index for workspaceId + await queryRunner.query(` + CREATE INDEX "idx_evaluator_workspaceId" ON "evaluator"("workspaceId"); + `) + + // step 1 - convert from varchar to UUID type + await queryRunner.query(` + ALTER TABLE "dataset" ALTER COLUMN "workspaceId" SET DATA TYPE UUID USING "workspaceId"::UUID; + `) + + // step 2 - add foreign key constraint + await queryRunner.query(` + ALTER TABLE "dataset" ADD CONSTRAINT "fk_dataset_workspaceId" FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id"); + `) + + // step 3 - create index for workspaceId + await queryRunner.query(` + CREATE INDEX "idx_dataset_workspaceId" ON "dataset"("workspaceId"); + `) + + // step 1 - convert from varchar to UUID type + await queryRunner.query(` + ALTER TABLE "variable" ALTER COLUMN "workspaceId" SET DATA TYPE UUID USING "workspaceId"::UUID; + `) + + // step 2 - add foreign key constraint + await queryRunner.query(` + ALTER TABLE "variable" ADD CONSTRAINT "fk_variable_workspaceId" FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id"); + `) + + // step 3 - create index for workspaceId + await queryRunner.query(` + CREATE INDEX "idx_variable_workspaceId" ON "variable"("workspaceId"); + `) + + // step 1 - convert from varchar to UUID type + await queryRunner.query(` + ALTER TABLE "workspace_shared" ALTER COLUMN "workspaceId" SET DATA TYPE UUID USING "workspaceId"::UUID; + `) + + // step 2 - add foreign key constraint + await queryRunner.query(` + ALTER TABLE "workspace_shared" ADD CONSTRAINT "fk_workspace_shared_workspaceId" FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id"); + `) + + // step 3 - create index for workspaceId + await queryRunner.query(` + CREATE INDEX "idx_workspace_shared_workspaceId" ON "workspace_shared"("workspaceId"); + `) + + // step 1 - convert from varchar to UUID type + await queryRunner.query(` + ALTER TABLE "custom_template" ALTER COLUMN "workspaceId" SET DATA TYPE UUID USING "workspaceId"::UUID; + `) + + // step 2 - add foreign key constraint + await queryRunner.query(` + ALTER TABLE "custom_template" ADD CONSTRAINT "fk_custom_template_workspaceId" FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id"); + `) + + // step 3 - create index for workspaceId + 
await queryRunner.query(` + CREATE INDEX "idx_custom_template_workspaceId" ON "custom_template"("workspaceId"); + `) + } + + public async down(queryRunner: QueryRunner): Promise { + // step 1 - drop index + await queryRunner.query(` + DROP INDEX "idx_apikey_workspaceId"; + `) + + // step 2 - drop foreign key constraint + await queryRunner.query(` + ALTER TABLE "apikey" DROP CONSTRAINT "fk_apikey_workspaceId"; + `) + + // Step 3 - convert from UUID to varchar type + await queryRunner.query(` + ALTER TABLE "apikey" ALTER COLUMN "workspaceId" SET DATA TYPE varchar USING "workspaceId"::varchar; + `) + + // step 1 - drop index + await queryRunner.query(` + DROP INDEX "idx_user_activeWorkspaceId"; + `) + + // step 2 - drop foreign key constraint + await queryRunner.query(` + ALTER TABLE "user" DROP CONSTRAINT "fk_user_activeWorkspaceId"; + `) + + // Step 3 - convert from UUID to varchar type + await queryRunner.query(` + ALTER TABLE "user" ALTER COLUMN "activeWorkspaceId" SET DATA TYPE varchar USING "activeWorkspaceId"::varchar; + `) + + // step 1 - drop index + await queryRunner.query(` + DROP INDEX "idx_workspace_users_workspaceId"; + `) + + // step 2 - drop foreign key constraint + await queryRunner.query(` + ALTER TABLE "workspace_users" DROP CONSTRAINT "fk_workspace_users_workspaceId"; + `) + + // Step 3 - convert from UUID to varchar type + await queryRunner.query(` + ALTER TABLE "workspace_users" ALTER COLUMN "workspaceId" SET DATA TYPE varchar USING "workspaceId"::varchar; + `) + + // step 1 - drop index + await queryRunner.query(` + DROP INDEX "idx_chat_flow_workspaceId"; + `) + + // step 2 - drop foreign key constraint + await queryRunner.query(` + ALTER TABLE "chat_flow" DROP CONSTRAINT "fk_chat_flow_workspaceId"; + `) + + // Step 3 - convert from UUID to varchar type + await queryRunner.query(` + ALTER TABLE "chat_flow" ALTER COLUMN "workspaceId" SET DATA TYPE varchar USING "workspaceId"::varchar; + `) + + // step 1 - drop index + await queryRunner.query(` + DROP INDEX "idx_tool_workspaceId"; + `) + + // step 2 - drop foreign key constraint + await queryRunner.query(` + ALTER TABLE "tool" DROP CONSTRAINT "fk_tool_workspaceId"; + `) + + // Step 3 - convert from UUID to varchar type + await queryRunner.query(` + ALTER TABLE "tool" ALTER COLUMN "workspaceId" SET DATA TYPE varchar USING "workspaceId"::varchar; + `) + + // step 1 - drop index + await queryRunner.query(` + DROP INDEX "idx_assistant_workspaceId"; + `) + + // step 2 - drop foreign key constraint + await queryRunner.query(` + ALTER TABLE "assistant" DROP CONSTRAINT "fk_assistant_workspaceId"; + `) + + // Step 3 - convert from UUID to varchar type + await queryRunner.query(` + ALTER TABLE "assistant" ALTER COLUMN "workspaceId" SET DATA TYPE varchar USING "workspaceId"::varchar; + `) + + // step 1 - drop index + await queryRunner.query(` + DROP INDEX "idx_credential_workspaceId"; + `) + + // step 2 - drop foreign key constraint + await queryRunner.query(` + ALTER TABLE "credential" DROP CONSTRAINT "fk_credential_workspaceId"; + `) + + // Step 3 - convert from UUID to varchar type + await queryRunner.query(` + ALTER TABLE "credential" ALTER COLUMN "workspaceId" SET DATA TYPE varchar USING "workspaceId"::varchar; + `) + + // step 1 - drop index + await queryRunner.query(` + DROP INDEX "idx_document_store_workspaceId"; + `) + + // step 2 - drop foreign key constraint + await queryRunner.query(` + ALTER TABLE "document_store" DROP CONSTRAINT "fk_document_store_workspaceId"; + `) + + // Step 3 - convert from UUID to varchar type + 
await queryRunner.query(` + ALTER TABLE "document_store" ALTER COLUMN "workspaceId" SET DATA TYPE varchar USING "workspaceId"::varchar; + `) + + // step 1 - drop index + await queryRunner.query(` + DROP INDEX "idx_evaluation_workspaceId"; + `) + + // step 2 - drop foreign key constraint + await queryRunner.query(` + ALTER TABLE "evaluation" DROP CONSTRAINT "fk_evaluation_workspaceId"; + `) + + // Step 3 - convert from UUID to varchar type + await queryRunner.query(` + ALTER TABLE "evaluation" ALTER COLUMN "workspaceId" SET DATA TYPE varchar USING "workspaceId"::varchar; + `) + + // step 1 - drop index + await queryRunner.query(` + DROP INDEX "idx_evaluator_workspaceId"; + `) + + // step 2 - drop foreign key constraint + await queryRunner.query(` + ALTER TABLE "evaluator" DROP CONSTRAINT "fk_evaluator_workspaceId"; + `) + + // Step 3 - convert from UUID to varchar type + await queryRunner.query(` + ALTER TABLE "evaluator" ALTER COLUMN "workspaceId" SET DATA TYPE varchar USING "workspaceId"::varchar; + `) + + // step 1 - drop index + await queryRunner.query(` + DROP INDEX "idx_dataset_workspaceId"; + `) + + // step 2 - drop foreign key constraint + await queryRunner.query(` + ALTER TABLE "dataset" DROP CONSTRAINT "fk_dataset_workspaceId"; + `) + + // Step 3 - convert from UUID to varchar type + await queryRunner.query(` + ALTER TABLE "dataset" ALTER COLUMN "workspaceId" SET DATA TYPE varchar USING "workspaceId"::varchar; + `) + + // step 1 - drop index + await queryRunner.query(` + DROP INDEX "idx_variable_workspaceId"; + `) + + // step 2 - drop foreign key constraint + await queryRunner.query(` + ALTER TABLE "variable" DROP CONSTRAINT "fk_variable_workspaceId"; + `) + + // Step 3 - convert from UUID to varchar type + await queryRunner.query(` + ALTER TABLE "variable" ALTER COLUMN "workspaceId" SET DATA TYPE varchar USING "workspaceId"::varchar; + `) + + // step 1 - drop index + await queryRunner.query(` + DROP INDEX "idx_workspace_shared_workspaceId"; + `) + + // step 2 - drop foreign key constraint + await queryRunner.query(` + ALTER TABLE "workspace_shared" DROP CONSTRAINT "fk_workspace_shared_workspaceId"; + `) + + // Step 3 - convert from UUID to varchar type + await queryRunner.query(` + ALTER TABLE "workspace_shared" ALTER COLUMN "workspaceId" SET DATA TYPE varchar USING "workspaceId"::varchar; + `) + + // step 1 - drop index + await queryRunner.query(` + DROP INDEX "idx_custom_template_workspaceId"; + `) + + // step 2 - drop foreign key constraint + await queryRunner.query(` + ALTER TABLE "custom_template" DROP CONSTRAINT "fk_custom_template_workspaceId"; + `) + + // Step 3 - convert from UUID to varchar type + await queryRunner.query(` + ALTER TABLE "custom_template" ALTER COLUMN "workspaceId" SET DATA TYPE varchar USING "workspaceId"::varchar; + `) + } +} diff --git a/packages/server/src/enterprise/database/migrations/postgres/1729133111652-LinkOrganizationId.ts b/packages/server/src/enterprise/database/migrations/postgres/1729133111652-LinkOrganizationId.ts new file mode 100644 index 00000000000..d7b5aa78a73 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/postgres/1729133111652-LinkOrganizationId.ts @@ -0,0 +1,39 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class LinkOrganizationId1729133111652 implements MigrationInterface { + name = 'LinkOrganizationId1729133111652' + + public async up(queryRunner: QueryRunner): Promise { + // step 1 - convert from varchar to UUID type + await queryRunner.query(` + ALTER TABLE "workspace" ALTER 
COLUMN "organizationId" SET DATA TYPE UUID USING "organizationId"::UUID; + `) + + // step 2 - add foreign key constraint + await queryRunner.query(` + ALTER TABLE "workspace" ADD CONSTRAINT "fk_workspace_organizationId" FOREIGN KEY ("organizationId") REFERENCES "organization"("id"); + `) + + // step 3 - create index for organizationId + await queryRunner.query(` + CREATE INDEX "idx_workspace_organizationId" ON "workspace"("organizationId"); + `) + } + + public async down(queryRunner: QueryRunner): Promise { + // step 1 - drop index + await queryRunner.query(` + DROP INDEX "idx_workspace_organizationId"; + `) + + // step 2 - drop foreign key constraint + await queryRunner.query(` + ALTER TABLE "workspace" DROP CONSTRAINT "fk_workspace_organizationId"; + `) + + // Step 3 - convert from UUID to varchar type + await queryRunner.query(` + ALTER TABLE "workspace" ALTER COLUMN "organizationId" SET DATA TYPE varchar USING "organizationId"::varchar; + `) + } +} diff --git a/packages/server/src/enterprise/database/migrations/postgres/1730519457880-AddSSOColumns.ts b/packages/server/src/enterprise/database/migrations/postgres/1730519457880-AddSSOColumns.ts new file mode 100644 index 00000000000..77295a5725a --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/postgres/1730519457880-AddSSOColumns.ts @@ -0,0 +1,15 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddSSOColumns1730519457880 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "organization" ADD COLUMN IF NOT EXISTS "sso_config" text;`) + await queryRunner.query(`ALTER TABLE "user" ADD COLUMN IF NOT EXISTS "user_type" varchar;`) + await queryRunner.query(`ALTER TABLE "login_activity" ADD COLUMN IF NOT EXISTS "login_mode" varchar;`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "organization" DROP COLUMN "sso_config";`) + await queryRunner.query(`ALTER TABLE "user" DROP COLUMN "user_type";`) + await queryRunner.query(`ALTER TABLE "login_activity" DROP COLUMN "login_mode";`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/postgres/1734074497540-AddPersonalWorkspace.ts b/packages/server/src/enterprise/database/migrations/postgres/1734074497540-AddPersonalWorkspace.ts new file mode 100644 index 00000000000..d5ecbbe3eff --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/postgres/1734074497540-AddPersonalWorkspace.ts @@ -0,0 +1,27 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { v4 as uuidv4 } from 'uuid' + +export class AddPersonalWorkspace1734074497540 implements MigrationInterface { + name = 'AddPersonalWorkspace1734074497540' + + public async up(queryRunner: QueryRunner): Promise { + const users = await queryRunner.query(`select * from "user";`) + const organization = await queryRunner.query(`select "id" from "organization";`) + for (let user of users) { + const workspaceDescription = 'Personal Workspace of ' + user.id + const workspaceId = uuidv4() + + await queryRunner.query(` + insert into "workspace" ("id", "name", "description", "organizationId") + values('${workspaceId}', 'Personal Workspace', '${workspaceDescription}', '${organization[0].id}'); + `) + + await queryRunner.query(` + insert into "workspace_users" ("workspaceId", "userId", "role") + values('${workspaceId}', '${user.id}', 'pw'); + `) + } + } + + public async down(): Promise {} +} diff --git 
a/packages/server/src/enterprise/database/migrations/postgres/1737076223692-RefactorEnterpriseDatabase.ts b/packages/server/src/enterprise/database/migrations/postgres/1737076223692-RefactorEnterpriseDatabase.ts new file mode 100644 index 00000000000..e40749aca62 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/postgres/1737076223692-RefactorEnterpriseDatabase.ts @@ -0,0 +1,472 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { decrypt, encrypt } from '../../../utils/encryption.util' +import { LoginMethodStatus } from '../../entities/login-method.entity' +import { OrganizationUserStatus } from '../../entities/organization-user.entity' +import { OrganizationName } from '../../entities/organization.entity' +import { GeneralRole } from '../../entities/role.entity' +import { UserStatus } from '../../entities/user.entity' +import { WorkspaceUserStatus } from '../../entities/workspace-user.entity' +import { WorkspaceName } from '../../entities/workspace.entity' + +export class RefactorEnterpriseDatabase1737076223692 implements MigrationInterface { + name = 'RefactorEnterpriseDatabase1737076223692' + private async modifyTable(queryRunner: QueryRunner): Promise { + /*------------------------------------- + --------------- user ----------------- + --------------------------------------*/ + // rename user table to temp_user + await queryRunner.query(`alter table "user" rename to "temp_user";`) + + // create user table + await queryRunner.query(` + create table "user" ( + "id" uuid default uuid_generate_v4() primary key, + "name" varchar(100) not null, + "email" varchar(255) not null unique, + "credential" text null, + "tempToken" text null, + "tokenExpiry" timestamp null, + "status" varchar(20) default '${UserStatus.UNVERIFIED}' not null, + "createdDate" timestamp default now() not null, + "updatedDate" timestamp default now() not null, + "createdBy" uuid not null, + "updatedBy" uuid not null, + constraint "fk_createdBy" foreign key ("createdBy") references "user" ("id"), + constraint "fk_updatedBy" foreign key ("updatedBy") references "user" ("id") + ); + `) + + /*------------------------------------- + ----------- organization -------------- + --------------------------------------*/ + // rename organization table to temp_organization + await queryRunner.query(`alter table "organization" rename to "temp_organization";`) + + // create organization table + await queryRunner.query(` + create table "organization" ( + "id" uuid default uuid_generate_v4() primary key, + "name" varchar(100) default '${OrganizationName.DEFAULT_ORGANIZATION}' not null, + "customerId" varchar(100) null, + "subscriptionId" varchar(100) null, + "createdDate" timestamp default now() not null, + "updatedDate" timestamp default now() not null, + "createdBy" uuid not null, + "updatedBy" uuid not null, + constraint "fk_createdBy" foreign key ("createdBy") references "user" ("id"), + constraint "fk_updatedBy" foreign key ("updatedBy") references "user" ("id") + ); + `) + + /*------------------------------------- + ----------- login method -------------- + --------------------------------------*/ + // create login_method table + await queryRunner.query(` + create table "login_method" ( + "id" uuid default uuid_generate_v4() primary key, + "organizationId" uuid null, + "name" varchar(100) not null, + "config" text not null, + "status" varchar(20) default '${LoginMethodStatus.ENABLE}' not null, + "createdDate" timestamp default now() not null, + "updatedDate" timestamp default now() not null, 
+ "createdBy" uuid null, + "updatedBy" uuid null, + constraint "fk_organizationId" foreign key ("organizationId") references "organization" ("id"), + constraint "fk_createdBy" foreign key ("createdBy") references "user" ("id"), + constraint "fk_updatedBy" foreign key ("updatedBy") references "user" ("id") + ); + `) + + /*------------------------------------- + --------------- role ------------------ + --------------------------------------*/ + // rename roles table to temp_role + await queryRunner.query(`alter table "roles" rename to "temp_role";`) + + // create organization_login_method table + await queryRunner.query(` + create table "role" ( + "id" uuid default uuid_generate_v4() primary key, + "organizationId" uuid null, + "name" varchar(100) not null, + "description" text null, + "permissions" text not null, + "createdDate" timestamp default now() not null, + "updatedDate" timestamp default now() not null, + "createdBy" uuid null, + "updatedBy" uuid null, + constraint "fk_organizationId" foreign key ("organizationId") references "organization" ("id"), + constraint "fk_createdBy" foreign key ("createdBy") references "user" ("id"), + constraint "fk_updatedBy" foreign key ("updatedBy") references "user" ("id") + ); + `) + + /*------------------------------------- + ---------- organization_user ---------- + --------------------------------------*/ + // create organization_user table + await queryRunner.query(` + create table "organization_user" ( + "organizationId" uuid not null, + "userId" uuid not null, + "roleId" uuid not null, + "status" varchar(20) default '${OrganizationUserStatus.ACTIVE}' not null, + "createdDate" timestamp default now() not null, + "updatedDate" timestamp default now() not null, + "createdBy" uuid not null, + "updatedBy" uuid not null, + constraint "pk_organization_user" primary key ("organizationId", "userId"), + constraint "fk_organizationId" foreign key ("organizationId") references "organization" ("id"), + constraint "fk_userId" foreign key ("userId") references "user" ("id"), + constraint "fk_roleId" foreign key ("roleId") references "role" ("id"), + constraint "fk_createdBy" foreign key ("createdBy") references "user" ("id"), + constraint "fk_updatedBy" foreign key ("updatedBy") references "user" ("id") + ); + `) + + /*------------------------------------- + ------------- workspace --------------- + --------------------------------------*/ + // modify workspace table + await queryRunner.query(` + alter table "workspace" + drop constraint "fk_workspace_organizationId", + alter column "organizationId" set not null, + alter column "name" type varchar(100), + alter column "description" type text, + add column "createdBy" uuid null, + add column "updatedBy" uuid null, + add constraint "fk_createdBy" foreign key ("createdBy") references "user" ("id"), + add constraint "fk_updatedBy" foreign key ("updatedBy") references "user" ("id"); + `) + + // remove first if needed will be add back, will cause insert to slow + await queryRunner.query(` + drop index "idx_workspace_organizationId"; + `) + + /*------------------------------------- + ----------- workspace_user ------------ + --------------------------------------*/ + // rename workspace_users table to temp_workspace_user + await queryRunner.query(`alter table "workspace_users" rename to "temp_workspace_user";`) + + // create workspace_user table + await queryRunner.query(` + create table "workspace_user" ( + "workspaceId" uuid not null, + "userId" uuid not null, + "roleId" uuid not null, + "status" varchar(20) 
default '${WorkspaceUserStatus.INVITED}' not null, + "lastLogin" timestamp null, + "createdDate" timestamp default now() not null, + "updatedDate" timestamp default now() not null, + "createdBy" uuid not null, + "updatedBy" uuid not null, + constraint "pk_workspace_user" primary key ("workspaceId", "userId"), + constraint "fk_workspaceId" foreign key ("workspaceId") references "workspace" ("id"), + constraint "fk_userId" foreign key ("userId") references "user" ("id"), + constraint "fk_roleId" foreign key ("roleId") references "role" ("id"), + constraint "fk_createdBy" foreign key ("createdBy") references "user" ("id"), + constraint "fk_updatedBy" foreign key ("updatedBy") references "user" ("id") + ); + `) + } + + private async deleteWorkspaceWithoutUser(queryRunner: QueryRunner) { + const workspaceWithoutUser = await queryRunner.query(` + select w."id" as "id" from "workspace_user" as "wu" + right join "workspace" as "w" on "wu"."workspaceId" = "w"."id" + where "wu"."userId" is null; + `) + const workspaceIds = workspaceWithoutUser.map((workspace: { id: string }) => `'${workspace.id}'`).join(',') + + // Delete related records from other tables that reference the deleted workspaces + if (workspaceIds && workspaceIds.length > 0) { + await queryRunner.query(` + delete from "workspace_user" where "workspaceId" in (${workspaceIds}); + `) + await queryRunner.query(` + delete from "apikey" where "workspaceId" in (${workspaceIds}); + `) + await queryRunner.query(` + delete from "assistant" where "workspaceId" in (${workspaceIds}); + `) + const chatflows = await queryRunner.query(` + select id from "chat_flow" where "workspaceId" in (${workspaceIds}); + `) + const chatflowIds = chatflows.map((chatflow: { id: string }) => `'${chatflow.id}'`).join(',') + if (chatflowIds && chatflowIds.length > 0) { + await queryRunner.query(` + delete from "chat_flow" where "workspaceId" in (${workspaceIds}); + `) + await queryRunner.query(` + delete from "upsert_history" where "chatflowid" in (${chatflowIds}); + `) + await queryRunner.query(` + delete from "chat_message" where "chatflowid" in (${chatflowIds}); + `) + await queryRunner.query(` + delete from "chat_message_feedback" where "chatflowid" in (${chatflowIds}); + `) + } + await queryRunner.query(` + delete from "credential" where "workspaceId" in (${workspaceIds}); + `) + await queryRunner.query(` + delete from "custom_template" where "workspaceId" in (${workspaceIds}); + `) + const datasets = await queryRunner.query(` + select id from "dataset" where "workspaceId" in (${workspaceIds}); + `) + const datasetIds = datasets.map((dataset: { id: string }) => `'${dataset.id}'`).join(',') + if (datasetIds && datasetIds.length > 0) { + await queryRunner.query(` + delete from "dataset" where "workspaceId" in (${workspaceIds}); + `) + await queryRunner.query(` + delete from "dataset_row" where "datasetId" in (${datasetIds}); + `) + } + const documentStores = await queryRunner.query(` + select id from "document_store" where "workspaceId" in (${workspaceIds}); + `) + const documentStoreIds = documentStores.map((documentStore: { id: string }) => `'${documentStore.id}'`).join(',') + if (documentStoreIds && documentStoreIds.length > 0) { + await queryRunner.query(` + delete from "document_store" where "workspaceId" in (${workspaceIds}); + `) + await queryRunner.query(` + delete from "document_store_file_chunk" where "storeId" in (${documentStoreIds}); + `) + } + const evaluations = await queryRunner.query(` + select id from "evaluation" where "workspaceId" in 
(${workspaceIds}); + `) + const evaluationIds = evaluations.map((evaluation: { id: string }) => `'${evaluation.id}'`).join(',') + if (evaluationIds && evaluationIds.length > 0) { + await queryRunner.query(` + delete from "evaluation" where "workspaceId" in (${workspaceIds}); + `) + await queryRunner.query(` + delete from "evaluation_run" where "evaluationId" in (${evaluationIds}); + `) + } + await queryRunner.query(` + delete from "evaluator" where "workspaceId" in (${workspaceIds}); + `) + await queryRunner.query(` + delete from "tool" where "workspaceId" in (${workspaceIds}); + `) + await queryRunner.query(` + delete from "variable" where "workspaceId" in (${workspaceIds}); + `) + await queryRunner.query(` + delete from "workspace_shared" where "workspaceId" in (${workspaceIds}); + `) + await queryRunner.query(` + delete from "workspace" where "id" in (${workspaceIds}); + `) + } + } + + private async populateTable(queryRunner: QueryRunner): Promise { + // insert generalRole + const generalRole = [ + { + name: 'owner', + description: 'Has full control over the organization.', + permissions: '["organization","workspace"]' + }, + { + name: 'member', + description: 'Has limited control over the organization.', + permissions: '[]' + }, + { + name: 'personal workspace', + description: 'Has full control over the personal workspace', + permissions: + '[ "chatflows:view", "chatflows:create", "chatflows:update", "chatflows:duplicate", "chatflows:delete", "chatflows:export", "chatflows:import", "chatflows:config", "chatflows:domains", "agentflows:view", "agentflows:create", "agentflows:update", "agentflows:duplicate", "agentflows:delete", "agentflows:export", "agentflows:import", "agentflows:config", "agentflows:domains", "tools:view", "tools:create", "tools:update", "tools:delete", "tools:export", "assistants:view", "assistants:create", "assistants:update", "assistants:delete", "credentials:view", "credentials:create", "credentials:update", "credentials:delete", "credentials:share", "variables:view", "variables:create", "variables:update", "variables:delete", "apikeys:view", "apikeys:create", "apikeys:update", "apikeys:delete", "apikeys:import", "documentStores:view", "documentStores:create", "documentStores:update", "documentStores:delete", "documentStores:add-loader", "documentStores:delete-loader", "documentStores:preview-process", "documentStores:upsert-config", "datasets:view", "datasets:create", "datasets:update", "datasets:delete", "evaluators:view", "evaluators:create", "evaluators:update", "evaluators:delete", "evaluations:view", "evaluations:create", "evaluations:update", "evaluations:delete", "evaluations:run", "templates:marketplace", "templates:custom", "templates:custom-delete", "templates:toolexport", "templates:flowexport", "templates:custom-share", "workspace:export", "workspace:import", "executions:view", "executions:delete" ]' + } + ] + for (let role of generalRole) { + await queryRunner.query(` + insert into "role"("name", "description", "permissions") + values('${role.name}', '${role.description}', '${role.permissions}'); + `) + } + + const users = await queryRunner.query('select * from "temp_user";') + const noExistingData = users.length > 0 === false + if (noExistingData) return + + const organizations = await queryRunner.query('select * from "temp_organization";') + const organizationId = organizations[0].id + const adminUserId = organizations[0].adminUserId + const ssoConfig = organizations[0].sso_config ? 
JSON.parse(await decrypt(organizations[0].sso_config)).providers : [] + + /*------------------------------------- + --------------- user ----------------- + --------------------------------------*/ + // insert user with temp_user data + await queryRunner.query(` + insert into "user" ("id", "name", "email", "credential", "tempToken", "tokenExpiry", "status", "createdBy", "updatedBy") + select tu."id", coalesce(tu."name", tu."email"), tu."email", tu."credential", tu."tempToken", tu."tokenExpiry", tu."status", + '${adminUserId}', '${adminUserId}' + from "temp_user" as "tu"; + `) + + /*------------------------------------- + ----------- organization -------------- + --------------------------------------*/ + // insert organization with temp_organization data + await queryRunner.query(` + insert into "organization" ("id", "name", "createdBy", "updatedBy") + select "id", "name", "adminUserId"::uuid, "adminUserId"::uuid from "temp_organization"; + `) + + /*------------------------------------- + ----------- login method -------------- + --------------------------------------*/ + // insert login_method with temp_organization data + for (let config of ssoConfig) { + const newConfigFormat = { + domain: config.domain === '' || config.domain === undefined ? undefined : config.domain, + tenantID: config.tenantID === '' || config.tenantID === undefined ? undefined : config.tenantID, + clientID: config.clientID === '' || config.clientID === undefined ? undefined : config.clientID, + clientSecret: config.clientSecret === '' || config.clientSecret === undefined ? undefined : config.clientSecret + } + const status = config.configEnabled === true ? LoginMethodStatus.ENABLE : LoginMethodStatus.DISABLE + + const allUndefined = Object.values(newConfigFormat).every((value) => value === undefined) + if (allUndefined && status === LoginMethodStatus.DISABLE) continue + const encryptData = await encrypt(JSON.stringify(newConfigFormat)) + + await queryRunner.query(` + insert into "login_method" ("organizationId", "name", "config", "status", "createdBy", "updatedBy") + values('${organizationId}','${config.providerName}','${encryptData}','${status}','${adminUserId}','${adminUserId}'); + `) + } + + /*------------------------------------- + --------------- role ------------------ + --------------------------------------*/ + // insert workspace role into role + const workspaceRole = await queryRunner.query(`select "id", "name", "description", "permissions" from "temp_role";`) + for (let role of workspaceRole) { + role.permissions = JSON.stringify(role.permissions.split(',').filter((permission: string) => permission.trim() !== '')) + const haveDescriptionQuery = `insert into "role" ("id", "organizationId", "name", "description", "permissions", "createdBy", "updatedBy") + values('${role.id}','${organizationId}','${role.name}','${role.description}','${role.permissions}','${adminUserId}','${adminUserId}');` + const noHaveDescriptionQuery = `insert into "role" ("id", "organizationId", "name", "permissions", "createdBy", "updatedBy") + values('${role.id}','${organizationId}','${role.name}','${role.permissions}','${adminUserId}','${adminUserId}');` + const insertRoleQuery = role.description ? 
haveDescriptionQuery : noHaveDescriptionQuery + await queryRunner.query(insertRoleQuery) + } + + /*------------------------------------- + ---------- organization_user ---------- + --------------------------------------*/ + const roles = await queryRunner.query('select * from "role";') + // insert organization_user with user, role and temp_organization data + for (let user of users) { + const roleId = + user.id === adminUserId + ? roles.find((role: any) => role.name === GeneralRole.OWNER).id + : roles.find((role: any) => role.name === GeneralRole.MEMBER).id + await queryRunner.query(` + insert into "organization_user" ("organizationId", "userId", "roleId", "status", "createdBy", "updatedBy") + values ('${organizationId}','${user.id}','${roleId}','${user.status}','${adminUserId}','${adminUserId}'); + `) + } + + /*------------------------------------- + ------------- workspace --------------- + --------------------------------------*/ + const workspaces = await queryRunner.query('select * from "workspace";') + for (let workspace of workspaces) { + await queryRunner.query( + `update "workspace" set "createdBy" = '${adminUserId}', "updatedBy" = '${adminUserId}' where "id" = '${workspace.id}';` + ) + } + + /*------------------------------------- + ----------- workspace_user ------------ + --------------------------------------*/ + const workspaceUsers = await queryRunner.query('select * from "temp_workspace_user";') + for (let workspaceUser of workspaceUsers) { + switch (workspaceUser.role) { + case 'org_admin': + workspaceUser.role = roles.find((role: any) => role.name === GeneralRole.OWNER).id + break + case 'pw': + workspaceUser.role = roles.find((role: any) => role.name === GeneralRole.PERSONAL_WORKSPACE).id + break + default: + workspaceUser.role = roles.find((role: any) => role.name === workspaceUser.role).id + break + } + const user = users.find((user: any) => user.id === workspaceUser.userId) + const workspace = workspaces.find((workspace: any) => workspace.id === workspaceUser.workspaceId) + if (workspaceUser.workspaceId === user.activeWorkspaceId && user.lastLogin) { + const lastLogin = new Date(user.lastLogin).toISOString() + await queryRunner.query(` + insert into "workspace_user" ("workspaceId", "userId", "roleId", "status", "lastLogin","createdBy", "updatedBy") + values ('${workspaceUser.workspaceId}','${workspaceUser.userId}','${workspaceUser.role}','${WorkspaceUserStatus.ACTIVE}','${lastLogin}','${adminUserId}','${adminUserId}'); + `) + } else if (workspace.name === WorkspaceName.DEFAULT_PERSONAL_WORKSPACE && !user.lastLogin) { + // Skip personal workspaces for users who haven't signed up yet to avoid duplicates when they sign up. + // account.service.ts creates personal workspace during sign-up. 
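+ // The two deletes below remove both the temp_workspace_user mapping and the pre-created personal workspace row itself, so account.service.ts can create a fresh personal workspace when the user signs up.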
+ await queryRunner.query(` + delete from "temp_workspace_user" where "workspaceId" = '${workspaceUser.workspaceId}' and "userId" = '${workspaceUser.userId}'; + `) + await queryRunner.query(` + delete from "workspace" where "id" = '${workspaceUser.workspaceId}'; + `) + } else { + await queryRunner.query(` + insert into "workspace_user" ("workspaceId", "userId", "roleId", "status","createdBy", "updatedBy") + values ('${workspaceUser.workspaceId}','${workspaceUser.userId}','${workspaceUser.role}','${WorkspaceUserStatus.INVITED}','${adminUserId}','${adminUserId}'); + `) + } + } + + await this.deleteWorkspaceWithoutUser(queryRunner) + } + + private async deleteTempTable(queryRunner: QueryRunner): Promise { + await queryRunner.query(` + drop table "temp_workspace_user"; + `) + await queryRunner.query(` + drop table "temp_role"; + `) + await queryRunner.query(` + drop table "temp_organization"; + `) + await queryRunner.query(` + drop table "temp_user"; + `) + } + + public async up(queryRunner: QueryRunner): Promise { + await this.modifyTable(queryRunner) + await this.populateTable(queryRunner) + await this.deleteTempTable(queryRunner) + + // This query cannot be part of the modifyTable function because: + // 1. The "organizationId" in the "workspace" table might be referencing data in the "temp_organization" table, so it must be altered last. + // 2. Setting "createdBy" and "updatedBy" to NOT NULL needs to happen after ensuring there’s no existing data that would violate the constraint, + // because altering these columns while there is data could prevent new records from being inserted into the "workspace" table. + await queryRunner.query(` + alter table "workspace" + alter column "createdBy" set not null, + alter column "updatedBy" set not null, + add constraint "fk_organizationId" foreign key ("organizationId") references "organization" ("id"); + `) + } + + public async down(): Promise {} +} diff --git a/packages/server/src/enterprise/database/migrations/postgres/1746862866554-ExecutionLinkWorkspaceId.ts b/packages/server/src/enterprise/database/migrations/postgres/1746862866554-ExecutionLinkWorkspaceId.ts new file mode 100644 index 00000000000..2c8798f63bc --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/postgres/1746862866554-ExecutionLinkWorkspaceId.ts @@ -0,0 +1,43 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class ExecutionLinkWorkspaceId1746862866554 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + // step 1 - add workspaceId column + await queryRunner.query(`ALTER TABLE "execution" ADD COLUMN IF NOT EXISTS "workspaceId" varchar;`) + + // step 2 - convert from varchar to UUID type + await queryRunner.query(` + ALTER TABLE "execution" ALTER COLUMN "workspaceId" SET DATA TYPE UUID USING "workspaceId"::UUID; + `) + + // step 3 - add foreign key constraint + await queryRunner.query(` + ALTER TABLE "execution" ADD CONSTRAINT "fk_execution_workspaceId" FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id"); + `) + + // step 4 - create index for workspaceId + await queryRunner.query(` + CREATE INDEX "idx_execution_workspaceId" ON "execution"("workspaceId"); + `) + } + + public async down(queryRunner: QueryRunner): Promise { + // step 1 - drop index + await queryRunner.query(` + DROP INDEX "idx_execution_workspaceId"; + `) + + // step 2 - drop foreign key constraint + await queryRunner.query(` + ALTER TABLE "execution" DROP CONSTRAINT "fk_execution_workspaceId"; + `) + + // step 3 - convert from UUID to 
varchar type + await queryRunner.query(` + ALTER TABLE "execution" ALTER COLUMN "workspaceId" SET DATA TYPE varchar USING "workspaceId"::varchar; + `) + + // step 4 - drop workspaceId column + await queryRunner.query(`ALTER TABLE "execution" DROP COLUMN "workspaceId";`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/sqlite/1720230151482-AddAuthTables.ts b/packages/server/src/enterprise/database/migrations/sqlite/1720230151482-AddAuthTables.ts new file mode 100644 index 00000000000..3370af6e780 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/sqlite/1720230151482-AddAuthTables.ts @@ -0,0 +1,40 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddAuthTables1720230151482 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS "user" ( + "id" varchar PRIMARY KEY NOT NULL, + "role" varchar NOT NULL, + "name" varchar, + "credential" text, + "tempToken" text, + "tokenExpiry" datetime, + "email" varchar NOT NULL, + "status" varchar NOT NULL, + "activeWorkspaceId" varchar NOT NULL, + "lastLogin" datetime);` + ) + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS "roles" ( + "id" varchar PRIMARY KEY NOT NULL, + "name" varchar, + "description" varchar, + "permissions" text);` + ) + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS "login_activity" ( + "id" varchar PRIMARY KEY NOT NULL, + "username" varchar NOT NULL, + "activity_code" integer NOT NULL, + "message" varchar NOT NULL, + "attemptedDateTime" datetime NOT NULL DEFAULT (datetime('now')));` + ) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE user`) + await queryRunner.query(`DROP TABLE roles`) + await queryRunner.query(`DROP TABLE login_activity`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/sqlite/1720230151484-AddWorkspace.ts b/packages/server/src/enterprise/database/migrations/sqlite/1720230151484-AddWorkspace.ts new file mode 100644 index 00000000000..5718b6a536f --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/sqlite/1720230151484-AddWorkspace.ts @@ -0,0 +1,47 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { ensureColumnExists } from './sqlliteCustomFunctions' + +export class AddWorkspace1720230151484 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS "workspace" ("id" varchar PRIMARY KEY NOT NULL, +"name" text NOT NULL, +"description" varchar, +"createdDate" datetime NOT NULL DEFAULT (datetime('now')), +"updatedDate" datetime NOT NULL DEFAULT (datetime('now')));` + ) + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS "workspace_users" ("id" varchar PRIMARY KEY NOT NULL, +"workspaceId" varchar NOT NULL, +"userId" varchar NOT NULL, +"role" varchar NOT NULL);` + ) + + await ensureColumnExists(queryRunner, 'chat_flow', 'workspaceId', 'TEXT') + await ensureColumnExists(queryRunner, 'tool', 'workspaceId', 'TEXT') + await ensureColumnExists(queryRunner, 'assistant', 'workspaceId', 'TEXT') + await ensureColumnExists(queryRunner, 'credential', 'workspaceId', 'TEXT') + await ensureColumnExists(queryRunner, 'document_store', 'workspaceId', 'TEXT') + await ensureColumnExists(queryRunner, 'evaluation', 'workspaceId', 'TEXT') + await ensureColumnExists(queryRunner, 'evaluator', 'workspaceId', 'TEXT') + await ensureColumnExists(queryRunner, 'dataset', 
'workspaceId', 'TEXT') + await ensureColumnExists(queryRunner, 'apikey', 'workspaceId', 'TEXT') + await ensureColumnExists(queryRunner, 'variable', 'workspaceId', 'TEXT') + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE workspace`) + await queryRunner.query(`DROP TABLE workspace_users`) + + await queryRunner.query(`ALTER TABLE "chat_flow" DROP COLUMN "workspaceId";`) + await queryRunner.query(`ALTER TABLE "tool" DROP COLUMN "workspaceId";`) + await queryRunner.query(`ALTER TABLE "assistant" DROP COLUMN "workspaceId";`) + await queryRunner.query(`ALTER TABLE "credential" DROP COLUMN "workspaceId";`) + await queryRunner.query(`ALTER TABLE "document_store" DROP COLUMN "workspaceId";`) + await queryRunner.query(`ALTER TABLE "evaluation" DROP COLUMN "workspaceId";`) + await queryRunner.query(`ALTER TABLE "evaluator" DROP COLUMN "workspaceId";`) + await queryRunner.query(`ALTER TABLE "dataset" DROP COLUMN "workspaceId";`) + await queryRunner.query(`ALTER TABLE "apikey" DROP COLUMN "workspaceId";`) + await queryRunner.query(`ALTER TABLE "variable" DROP COLUMN "workspaceId";`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/sqlite/1726654922034-AddWorkspaceShared.ts b/packages/server/src/enterprise/database/migrations/sqlite/1726654922034-AddWorkspaceShared.ts new file mode 100644 index 00000000000..3010779816c --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/sqlite/1726654922034-AddWorkspaceShared.ts @@ -0,0 +1,19 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddWorkspaceShared1726654922034 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS "workspace_shared" ( + "id" varchar PRIMARY KEY NOT NULL, + "workspaceId" varchar NOT NULL, + "sharedItemId" varchar NOT NULL, + "itemType" varchar NOT NULL, + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')));` + ) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE workspace_shared`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/sqlite/1726655750383-AddWorkspaceIdToCustomTemplate.ts b/packages/server/src/enterprise/database/migrations/sqlite/1726655750383-AddWorkspaceIdToCustomTemplate.ts new file mode 100644 index 00000000000..ab6efb66ed5 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/sqlite/1726655750383-AddWorkspaceIdToCustomTemplate.ts @@ -0,0 +1,11 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class AddWorkspaceIdToCustomTemplate1726655750383 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "custom_template" ADD COLUMN "workspaceId" TEXT;`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "custom_template" DROP COLUMN "workspaceId";`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/sqlite/1727798417345-AddOrganization.ts b/packages/server/src/enterprise/database/migrations/sqlite/1727798417345-AddOrganization.ts new file mode 100644 index 00000000000..79c08aa6c58 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/sqlite/1727798417345-AddOrganization.ts @@ -0,0 +1,24 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { ensureColumnExists } from 
'./sqlliteCustomFunctions' + +export class AddOrganization1727798417345 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `CREATE TABLE IF NOT EXISTS "organization" ("id" varchar PRIMARY KEY NOT NULL, +"name" text NOT NULL, +"adminUserId" text, +"defaultWsId" text, +"organization_type" text, +"createdDate" datetime NOT NULL DEFAULT (datetime('now')), +"updatedDate" datetime NOT NULL DEFAULT (datetime('now')));` + ) + + await ensureColumnExists(queryRunner, 'workspace', 'organizationId', 'varchar') + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`DROP TABLE organization`) + + await queryRunner.query(`ALTER TABLE "workspace" DROP COLUMN "organizationId";`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/sqlite/1729130948686-LinkWorkspaceId.ts b/packages/server/src/enterprise/database/migrations/sqlite/1729130948686-LinkWorkspaceId.ts new file mode 100644 index 00000000000..3204e821369 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/sqlite/1729130948686-LinkWorkspaceId.ts @@ -0,0 +1,874 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export async function linkWorkspaceId(queryRunner: QueryRunner, include = true) { + /*------------------------------------- + ---------------- ApiKey --------------- + --------------------------------------*/ + // step 1 - create temp table with workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_apikey" ( + "id" varchar PRIMARY KEY NOT NULL, + "apiKey" varchar NOT NULL, + "apiSecret" varchar NOT NULL, + "keyName" varchar NOT NULL, + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "workspaceId" varchar, + FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id") + ); + `) + + // step 2 - create index for workspaceId in temp_apikey table + await queryRunner.query(`CREATE INDEX IF NOT EXISTS "idx_apikey_workspaceId" ON "temp_apikey"("workspaceId");`) + + // step 3 - migrate data + await queryRunner.query(` + INSERT INTO "temp_apikey" ("id", "apiKey", "apiSecret", "keyName", "createdDate", "updatedDate", "workspaceId") + SELECT "id", "apiKey", "apiSecret", "keyName", "updatedDate", "updatedDate", "workspaceId" FROM "apikey"; + `) + + // step 4 - drop apikey table + await queryRunner.query(`DROP TABLE "apikey";`) + + // step 5 - alter temp_apikey to apikey table + await queryRunner.query(`ALTER TABLE "temp_apikey" RENAME TO "apikey";`) + + /*------------------------------------- + ---------------- User --------------- + --------------------------------------*/ + if (include) { + // step 1 - create temp table with activeWorkspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_user" ( + "id" varchar PRIMARY KEY NOT NULL, + "role" varchar NOT NULL, + "name" varchar, + "credential" text, + "tempToken" text, + "tokenExpiry" datetime, + "email" varchar NOT NULL, + "status" varchar NOT NULL, + "lastLogin" datetime, + "activeWorkspaceId" varchar NOT NULL, + FOREIGN KEY ("activeWorkspaceId") REFERENCES "workspace"("id") + ); + `) + + // step 2 - create index for activeWorkspaceId in temp_user table + await queryRunner.query(`CREATE INDEX IF NOT EXISTS "idx_user_activeWorkspaceId" ON "temp_user"("activeWorkspaceId");`) + + // step 3 - migrate data + await queryRunner.query(` + INSERT INTO "temp_user" ("id", "role", "name", "credential", "tempToken", "tokenExpiry", "email", "status", "lastLogin", 
"activeWorkspaceId") + SELECT "id", "role", "name", "credential", "tempToken", "tokenExpiry", "email", "status", "lastLogin", "activeWorkspaceId" FROM "user"; + `) + + // step 4 - drop user table + await queryRunner.query(`DROP TABLE "user";`) + + // step 5 - alter temp_user to user table + await queryRunner.query(`ALTER TABLE "temp_user" RENAME TO "user";`) + } + + /*---------------------------------------------- + ---------------- Workspace Users --------------- + ------------------------------------------------*/ + + if (include) { + // step 1 - create temp table with workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_workspace_users" ( + "id" varchar PRIMARY KEY NOT NULL, + "workspaceId" varchar NOT NULL, + "userId" varchar NOT NULL, + "role" varchar NOT NULL, + FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id") + ); + `) + + // step 2 - create index for workspaceId in temp_workspace_users table + await queryRunner.query(`CREATE INDEX IF NOT EXISTS "idx_workspace_users_workspaceId" ON "temp_workspace_users"("workspaceId");`) + + // step 3 - migrate data + await queryRunner.query(` + INSERT INTO "temp_workspace_users" ("id", "workspaceId", "userId", "role") + SELECT "id", "workspaceId", "userId", "role" FROM "workspace_users"; + `) + + // step 4 - drop workspace_users table + await queryRunner.query(`DROP TABLE "workspace_users";`) + + // step 5 - alter temp_workspace_users to workspace_users table + await queryRunner.query(`ALTER TABLE "temp_workspace_users" RENAME TO "workspace_users";`) + } + + /*---------------------------------------------- + ---------------- Chatflow ---------------------- + ------------------------------------------------*/ + + // step 1 - create temp table with workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_chat_flow" ( + "id" varchar PRIMARY KEY NOT NULL, + "name" varchar NOT NULL, + "flowData" text NOT NULL, + "deployed" boolean, + "isPublic" boolean, + "apikeyid" varchar, + "chatbotConfig" text, + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "apiConfig" TEXT, + "analytic" TEXT, + "category" TEXT, + "speechToText" TEXT, + "type" TEXT, + "workspaceId" TEXT, + "followUpPrompts" TEXT, + FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id") + ); + `) + + // step 2 - create index for workspaceId in temp_chat_flow table + await queryRunner.query(`CREATE INDEX IF NOT EXISTS "idx_chat_flow_workspaceId" ON "temp_chat_flow"("workspaceId");`) + + // step 3 - migrate data + await queryRunner.query(` + INSERT INTO "temp_chat_flow" ("id", "name", "flowData", "deployed", "isPublic", "apikeyid", "chatbotConfig", "createdDate", "updatedDate", "apiConfig", "analytic", "category", "speechToText", "type", "workspaceId", "followUpPrompts") + SELECT "id", "name", "flowData", "deployed", "isPublic", "apikeyid", "chatbotConfig", "createdDate", "updatedDate", "apiConfig", "analytic", "category", "speechToText", "type", "workspaceId", "followUpPrompts" FROM "chat_flow"; + `) + + // step 4 - drop chat_flow table + await queryRunner.query(`DROP TABLE "chat_flow";`) + + // step 5 - alter temp_chat_flow to chat_flow table + await queryRunner.query(`ALTER TABLE "temp_chat_flow" RENAME TO "chat_flow";`) + + /*---------------------------------------------- + ---------------- Tool -------------------------- + ------------------------------------------------*/ + + // step 1 - create temp table with workspaceId as foreign key + await queryRunner.query(` + 
CREATE TABLE "temp_tool" ( + "id" varchar PRIMARY KEY NOT NULL, + "name" varchar NOT NULL, + "description" text NOT NULL, + "color" varchar NOT NULL, + "iconSrc" varchar, + "schema" text, + "func" text, + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "workspaceId" TEXT, + FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id") + ); + `) + + // step 2 - create index for workspaceId in temp_tool table + await queryRunner.query(`CREATE INDEX IF NOT EXISTS "idx_tool_workspaceId" ON "temp_tool"("workspaceId");`) + + // step 3 - migrate data + await queryRunner.query(` + INSERT INTO "temp_tool" ("id", "name", "description", "color", "iconSrc", "schema", "func", "createdDate", "updatedDate", "workspaceId") + SELECT "id", "name", "description", "color", "iconSrc", "schema", "func", "createdDate", "updatedDate", "workspaceId" FROM "tool"; + `) + + // step 4 - drop tool table + await queryRunner.query(`DROP TABLE "tool";`) + + // step 5 - alter temp_tool to tool table + await queryRunner.query(`ALTER TABLE "temp_tool" RENAME TO "tool";`) + + /*---------------------------------------------- + ---------------- Assistant ---------------------- + ------------------------------------------------*/ + + // step 1 - create temp table with workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_assistant" ( + "id" varchar PRIMARY KEY NOT NULL, + "details" text NOT NULL, + "credential" varchar NOT NULL, + "iconSrc" varchar, + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "workspaceId" TEXT, + FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id") + ); + `) + + // step 2 - create index for workspaceId in temp_assistant table + await queryRunner.query(`CREATE INDEX IF NOT EXISTS "idx_assistant_workspaceId" ON "temp_assistant"("workspaceId");`) + + // step 3 - migrate data + await queryRunner.query(` + INSERT INTO "temp_assistant" ("id", "details", "credential", "iconSrc", "createdDate", "updatedDate", "workspaceId") + SELECT "id", "details", "credential", "iconSrc", "createdDate", "updatedDate", "workspaceId" FROM "assistant"; + `) + + // step 4 - drop assistant table + await queryRunner.query(`DROP TABLE "assistant";`) + + // step 5 - alter temp_assistant to assistant table + await queryRunner.query(`ALTER TABLE "temp_assistant" RENAME TO "assistant";`) + + /*---------------------------------------------- + ---------------- Credential ---------------------- + ------------------------------------------------*/ + + // step 1 - create temp table with workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_credential" ( + "id" varchar PRIMARY KEY NOT NULL, + "name" varchar NOT NULL, + "credentialName" varchar NOT NULL, + "encryptedData" text NOT NULL, + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "workspaceId" TEXT, + FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id") + ); + `) + + // step 2 - create index for workspaceId in temp_credential table + await queryRunner.query(`CREATE INDEX IF NOT EXISTS "idx_credential_workspaceId" ON "temp_credential"("workspaceId");`) + + // step 3 - migrate data + await queryRunner.query(` + INSERT INTO "temp_credential" ("id", "name", "credentialName", "encryptedData", "createdDate", "updatedDate", "workspaceId") + SELECT "id", "name", "credentialName", "encryptedData", "createdDate", "updatedDate", 
"workspaceId" FROM "credential"; + `) + + // step 4 - drop credential table + await queryRunner.query(`DROP TABLE "credential";`) + + // step 5 - alter temp_credential to credential table + await queryRunner.query(`ALTER TABLE "temp_credential" RENAME TO "credential";`) + + /*--------------------------------------------------- + ---------------- Document Store ---------------------- + -----------------------------------------------------*/ + + // step 1 - create temp table with workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_document_store" ( + "id" varchar PRIMARY KEY NOT NULL, + "name" varchar NOT NULL, + "description" varchar, + "status" varchar NOT NULL, + "loaders" text, + "whereUsed" text, + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "vectorStoreConfig" TEXT, + "embeddingConfig" TEXT, + "recordManagerConfig" TEXT, + "workspaceId" TEXT, + FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id") + ); + `) + + // step 2 - create index for workspaceId in temp_document_store table + await queryRunner.query(`CREATE INDEX IF NOT EXISTS "idx_document_store_workspaceId" ON "temp_document_store"("workspaceId");`) + + // step 3 - migrate data + await queryRunner.query(` + INSERT INTO "temp_document_store" ("id", "name", "description", "status", "loaders", "whereUsed", "updatedDate", "createdDate", "vectorStoreConfig", "embeddingConfig", "recordManagerConfig", "workspaceId") + SELECT "id", "name", "description", "status", "loaders", "whereUsed", "updatedDate", "createdDate", "vectorStoreConfig", "embeddingConfig", "recordManagerConfig", "workspaceId" FROM "document_store"; + `) + + // step 4 - drop document_store table + await queryRunner.query(`DROP TABLE "document_store";`) + + // step 5 - alter temp_document_store to document_store table + await queryRunner.query(`ALTER TABLE "temp_document_store" RENAME TO "document_store";`) + + /*--------------------------------------------------- + ---------------- Evaluation ------------------------- + -----------------------------------------------------*/ + + // step 1 - create temp table with workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_evaluation" ( + "id" varchar PRIMARY KEY NOT NULL, + "name" varchar NOT NULL, + "chatflowId" text NOT NULL, + "chatflowName" text NOT NULL, + "datasetId" varchar NOT NULL, + "datasetName" varchar NOT NULL, + "additionalConfig" text, + "status" varchar NOT NULL, + "evaluationType" varchar, + "average_metrics" text, + "runDate" datetime NOT NULL DEFAULT (datetime('now')), + "workspaceId" TEXT, + FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id") + ); + `) + + // step 2 - create index for workspaceId in temp_evaluation table + await queryRunner.query(`CREATE INDEX IF NOT EXISTS "idx_evaluation_workspaceId" ON "temp_evaluation"("workspaceId");`) + + // step 3 - migrate data + await queryRunner.query(` + INSERT INTO "temp_evaluation" ("id", "name", "chatflowId", "chatflowName", "datasetId", "datasetName", "additionalConfig", "status", "evaluationType", "average_metrics", "runDate", "workspaceId") + SELECT "id", "name", "chatflowId", "chatflowName", "datasetId", "datasetName", "additionalConfig", "status", "evaluationType", "average_metrics", "runDate", "workspaceId" FROM "evaluation"; + `) + + // step 4 - drop evaluation table + await queryRunner.query(`DROP TABLE "evaluation";`) + + // step 5 - alter temp_evaluation to evaluation table + await queryRunner.query(`ALTER TABLE 
"temp_evaluation" RENAME TO "evaluation";`) + + /*--------------------------------------------------- + ---------------- Evaluator ------------------------- + -----------------------------------------------------*/ + + // step 1 - create temp table with workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_evaluator" ( + "id" varchar PRIMARY KEY NOT NULL, + "name" text NOT NULL, + "type" varchar, + "config" text, + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "workspaceId" TEXT, + FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id") + ); + `) + + // step 2 - create index for workspaceId in temp_evaluator table + await queryRunner.query(`CREATE INDEX IF NOT EXISTS "idx_evaluator_workspaceId" ON "temp_evaluator"("workspaceId");`) + + // step 3 - migrate data + await queryRunner.query(` + INSERT INTO "temp_evaluator" ("id", "name", "type", "config", "createdDate", "updatedDate", "workspaceId") + SELECT "id", "name", "type", "config", "createdDate", "updatedDate", "workspaceId" FROM "evaluator"; + `) + + // step 4 - drop evaluator table + await queryRunner.query(`DROP TABLE "evaluator";`) + + // step 5 - alter temp_evaluator to evaluator table + await queryRunner.query(`ALTER TABLE "temp_evaluator" RENAME TO "evaluator";`) + + /*--------------------------------------------------- + ---------------- Dataset ------------------------- + -----------------------------------------------------*/ + + // step 1 - create temp table with workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_dataset" ( + "id" varchar PRIMARY KEY NOT NULL, + "name" text NOT NULL, + "description" varchar, + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "workspaceId" TEXT, + FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id") + ); + `) + + // step 2 - create index for workspaceId in temp_dataset table + await queryRunner.query(`CREATE INDEX IF NOT EXISTS "idx_dataset_workspaceId" ON "temp_dataset"("workspaceId");`) + + // step 3 - migrate data + await queryRunner.query(` + INSERT INTO "temp_dataset" ("id", "name", "description", "createdDate", "updatedDate", "workspaceId") + SELECT "id", "name", "description", "createdDate", "updatedDate", "workspaceId" FROM "dataset"; + `) + + // step 4 - drop dataset table + await queryRunner.query(`DROP TABLE "dataset";`) + + // step 5 - alter temp_dataset to dataset table + await queryRunner.query(`ALTER TABLE "temp_dataset" RENAME TO "dataset";`) + + /*--------------------------------------------------- + ---------------- Variable --------------------------- + -----------------------------------------------------*/ + + // step 1 - create temp table with workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_variable" ( + "id" varchar PRIMARY KEY NOT NULL, + "name" text NOT NULL, + "value" text NOT NULL, + "type" varchar, + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "workspaceId" TEXT, + FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id") + ); + `) + + // step 2 - create index for workspaceId in temp_variable table + await queryRunner.query(`CREATE INDEX IF NOT EXISTS "idx_variable_workspaceId" ON "temp_variable"("workspaceId");`) + + // step 3 - migrate data + await queryRunner.query(` + INSERT INTO "temp_variable" ("id", "name", "value", "type", "createdDate", "updatedDate", 
"workspaceId") + SELECT "id", "name", "value", "type", "createdDate", "updatedDate", "workspaceId" FROM "variable"; + `) + + // step 4 - drop variable table + await queryRunner.query(`DROP TABLE "variable";`) + + // step 5 - alter temp_variable to variable table + await queryRunner.query(`ALTER TABLE "temp_variable" RENAME TO "variable";`) + + /*--------------------------------------------------- + ---------------- Workspace Shared ------------------- + -----------------------------------------------------*/ + + // step 1 - create temp table with workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_workspace_shared" ( + "id" varchar PRIMARY KEY NOT NULL, + "workspaceId" varchar NOT NULL, + "sharedItemId" varchar NOT NULL, + "itemType" varchar NOT NULL, + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id") + ); + `) + + // step 2 - create index for workspaceId in temp_workspace_shared table + await queryRunner.query(`CREATE INDEX IF NOT EXISTS "idx_workspace_shared_workspaceId" ON "temp_workspace_shared"("workspaceId");`) + + // step 3 - migrate data + await queryRunner.query(` + INSERT INTO "temp_workspace_shared" ("id", "workspaceId", "sharedItemId", "itemType", "createdDate", "updatedDate") + SELECT "id", "workspaceId", "sharedItemId", "itemType", "createdDate", "updatedDate" FROM "workspace_shared"; + `) + + // step 4 - drop workspace_shared table + await queryRunner.query(`DROP TABLE "workspace_shared";`) + + // step 5 - alter temp_workspace_shared to workspace_shared table + await queryRunner.query(`ALTER TABLE "temp_workspace_shared" RENAME TO "workspace_shared";`) + + /*--------------------------------------------------- + ---------------- Custom Template ------------------- + -----------------------------------------------------*/ + + // step 1 - create temp table with workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_custom_template" ( + "id" varchar PRIMARY KEY NOT NULL, + "name" varchar NOT NULL, + "flowData" text NOT NULL, + "description" varchar, + "badge" varchar, + "framework" varchar, + "usecases" varchar, + "type" varchar, + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "workspaceId" TEXT, + FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id") + ); + `) + + // step 2 - create index for workspaceId in temp_custom_template table + await queryRunner.query(`CREATE INDEX IF NOT EXISTS "idx_custom_template_workspaceId" ON "temp_custom_template"("workspaceId");`) + + // step 3 - migrate data + await queryRunner.query(` + INSERT INTO "temp_custom_template" ("id", "name", "flowData", "description", "badge", "framework", "usecases", "type", "updatedDate", "createdDate", "workspaceId") + SELECT "id", "name", "flowData", "description", "badge", "framework", "usecases", "type", "updatedDate", "createdDate", "workspaceId" FROM "custom_template"; + `) + + // step 4 - drop custom_template table + await queryRunner.query(`DROP TABLE "custom_template";`) + + // step 5 - alter temp_custom_template to custom_template table + await queryRunner.query(`ALTER TABLE "temp_custom_template" RENAME TO "custom_template";`) +} + +export class LinkWorkspaceId1729130948686 implements MigrationInterface { + name = 'LinkWorkspaceId1729130948686' + + public async up(queryRunner: QueryRunner): Promise { + await linkWorkspaceId(queryRunner) + } + + public 
async down(queryRunner: QueryRunner): Promise { + // step 1 - create temp table without workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_apikey" ( + "id" varchar PRIMARY KEY NOT NULL, + "apiKey" varchar, + "apiSecret" varchar NOT NULL, + "keyName" varchar NOT NULL, + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "workspaceId" varchar + ); + `) + + // step 2 - migrate data + await queryRunner.query(` + INSERT INTO "temp_apikey" ("id", "apiKey", "apiSecret", "keyName", "updatedDate") + SELECT "id", "apiKey", "apiSecret", "keyName", "updatedDate" FROM "apikey"; + `) + + // step 3 - drop apikey table + await queryRunner.query(`DROP TABLE "apikey";`) + + // step 4 - alter temp_apikey to apiKey table + await queryRunner.query(`ALTER TABLE "temp_apikey" RENAME TO "apikey";`) + + // step 1 - create temp table without activeWorkspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_user" ( + "id" varchar PRIMARY KEY NOT NULL, + "role" varchar NOT NULL, + "name" varchar, + "credential" text, + "tempToken" text, + "tokenExpiry" datetime, + "email" varchar NOT NULL, + "status" varchar NOT NULL, + "activeWorkspaceId" varchar NOT NULL, + "lastLogin" datetime + ); + `) + + // step 2 - migrate data + await queryRunner.query(` + INSERT INTO "temp_user" ("id", "role", "name", "credential", "tempToken", "tokenExpiry", "email", "status", "lastLogin", "activeWorkspaceId") + SELECT "id", "role", "name", "credential", "tempToken", "tokenExpiry", "email", "status", "lastLogin", "activeWorkspaceId" FROM "user"; + `) + + // step 3 - drop user table + await queryRunner.query(`DROP TABLE "user";`) + + // step 4 - alter temp_user to user table + await queryRunner.query(`ALTER TABLE "temp_user" RENAME TO "user";`) + + // step 1 - create temp table without workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_workspace_users" ( + "id" varchar PRIMARY KEY NOT NULL, + "workspaceId" varchar NOT NULL, + "userId" varchar NOT NULL, + "role" varchar NOT NULL + ); + `) + + // step 2 - migrate data + await queryRunner.query(` + INSERT INTO "temp_workspace_users" ("id", "workspaceId", "userId", "role") + SELECT "id", "workspaceId", "userId", "role" FROM "workspace_users"; + `) + + // step 3 - drop workspace_users table + await queryRunner.query(`DROP TABLE "workspace_users";`) + + // step 4 - alter temp_workspace_users to workspace_users table + await queryRunner.query(`ALTER TABLE "temp_workspace_users" RENAME TO "workspace_users";`) + + // step 1 - create temp table without workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_chat_flow" ( + "id" varchar PRIMARY KEY NOT NULL, + "name" varchar NOT NULL, + "flowData" text NOT NULL, + "deployed" boolean, + "isPublic" boolean, + "apikeyid" varchar, + "chatbotConfig" text, + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "apiConfig" TEXT, + "analytic" TEXT, + "category" TEXT, + "speechToText" TEXT, + "type" TEXT, + "workspaceId" TEXT, + "followUpPrompts" TEXT + ); + `) + + // step 2 - migrate data + await queryRunner.query(` + INSERT INTO "temp_chat_flow" ("id", "name", "flowData", "deployed", "isPublic", "apikeyid", "chatbotConfig", "createdDate", "updatedDate", "apiConfig", "analytic", "category", "speechToText", "type", "workspaceId", "followUpPrompts") + SELECT "id", "name", "flowData", "deployed", "isPublic", "apikeyid", "chatbotConfig", "createdDate", "updatedDate", "apiConfig", "analytic", "category", 
"speechToText", "type", "workspaceId", "followUpPrompts" FROM "chat_flow"; + `) + + // step 3 - drop chat_flow table + await queryRunner.query(`DROP TABLE "chat_flow";`) + + // step 4 - alter temp_chat_flow to chat_flow table + await queryRunner.query(`ALTER TABLE "temp_chat_flow" RENAME TO "chat_flow";`) + + // step 1 - create temp table without workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_tool" ( + "id" varchar PRIMARY KEY NOT NULL, + "name" varchar NOT NULL, + "description" text NOT NULL, + "color" varchar NOT NULL, + "iconSrc" varchar, + "schema" text, + "func" text, + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "workspaceId" TEXT + ); + `) + + // step 2 - migrate data + await queryRunner.query(` + INSERT INTO "temp_tool" ("id", "name", "description", "color", "iconSrc", "schema", "func", "createdDate", "updatedDate", "workspaceId") + SELECT "id", "name", "description", "color", "iconSrc", "schema", "func", "createdDate", "updatedDate", "workspaceId" FROM "tool"; + `) + + // step 3 - drop tool table + await queryRunner.query(`DROP TABLE "tool";`) + + // step 4 - alter temp_tool to tool table + await queryRunner.query(`ALTER TABLE "temp_tool" RENAME TO "tool";`) + + // step 1 - create temp table without workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_assistant" ( + "id" varchar PRIMARY KEY NOT NULL, + "details" text NOT NULL, + "credential" varchar NOT NULL, + "iconSrc" varchar, + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "workspaceId" TEXT + ); + `) + + // step 2 - migrate data + await queryRunner.query(` + INSERT INTO "temp_assistant" ("id", "details", "credential", "iconSrc", "createdDate", "updatedDate", "workspaceId") + SELECT "id", "details", "credential", "iconSrc", "createdDate", "updatedDate", "workspaceId" FROM "assistant"; + `) + + // step 3 - drop assistant table + await queryRunner.query(`DROP TABLE "assistant";`) + + // step 4 - alter temp_assistant to assistant table + await queryRunner.query(`ALTER TABLE "temp_assistant" RENAME TO "assistant";`) + + // step 1 - create temp table without workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_credential" ( + "id" varchar PRIMARY KEY NOT NULL, + "name" varchar NOT NULL, + "credentialName" varchar NOT NULL, + "encryptedData" text NOT NULL, + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "workspaceId" TEXT + ); + `) + + // step 2 - migrate data + await queryRunner.query(` + INSERT INTO "temp_credential" ("id", "name", "credentialName", "encryptedData", "createdDate", "updatedDate", "workspaceId") + SELECT "id", "name", "credentialName", "encryptedData", "createdDate", "updatedDate", "workspaceId" FROM "credential"; + `) + + // step 3 - drop credential table + await queryRunner.query(`DROP TABLE "credential";`) + + // step 4 - alter temp_credential to credential table + await queryRunner.query(`ALTER TABLE "temp_credential" RENAME TO "credential";`) + + // step 1 - create temp table without workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_document_store" ( + "id" varchar PRIMARY KEY NOT NULL, + "name" varchar NOT NULL, + "description" varchar, + "status" varchar NOT NULL, + "loaders" text, + "whereUsed" text, + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "createdDate" 
datetime NOT NULL DEFAULT (datetime('now')), + "vectorStoreConfig" TEXT, + "embeddingConfig" TEXT, + "recordManagerConfig" TEXT, + "workspaceId" TEXT + ); + `) + + // step 2 - migrate data + await queryRunner.query(` + INSERT INTO "temp_document_store" ("id", "name", "description", "status", "loaders", "whereUsed", "updatedDate", "createdDate", "vectorStoreConfig", "embeddingConfig", "recordManagerConfig", "workspaceId") + SELECT "id", "name", "description", "status", "loaders", "whereUsed", "updatedDate", "createdDate", "vectorStoreConfig", "embeddingConfig", "recordManagerConfig", "workspaceId" FROM "document_store"; + `) + + // step 3 - drop document_store table + await queryRunner.query(`DROP TABLE "document_store";`) + + // step 4 - alter temp_document_store to document_store table + await queryRunner.query(`ALTER TABLE "temp_document_store" RENAME TO "document_store";`) + + // step 1 - create temp table without workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_evaluation" ( + "id" varchar PRIMARY KEY NOT NULL, + "name" varchar NOT NULL, + "chatflowId" text NOT NULL, + "chatflowName" text NOT NULL, + "datasetId" varchar NOT NULL, + "datasetName" varchar NOT NULL, + "additionalConfig" text, + "status" varchar NOT NULL, + "evaluationType" varchar, + "average_metrics" text, + "runDate" datetime NOT NULL DEFAULT (datetime('now')), + "workspaceId" TEXT + ); + `) + + // step 2 - migrate data + await queryRunner.query(` + INSERT INTO "temp_evaluation" ("id", "name", "chatflowId", "chatflowName", "datasetId", "datasetName", "additionalConfig", "status", "evaluationType", "average_metrics", "runDate", "workspaceId") + SELECT "id", "name", "chatflowId", "chatflowName", "datasetId", "datasetName", "additionalConfig", "status", "evaluationType", "average_metrics", "runDate", "workspaceId" FROM "evaluation"; + `) + + // step 3 - drop evaluation table + await queryRunner.query(`DROP TABLE "evaluation";`) + + // step 4 - alter temp_evaluation to evaluation table + await queryRunner.query(`ALTER TABLE "temp_evaluation" RENAME TO "evaluation";`) + + // step 1 - create temp table without workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_evaluator" ( + "id" varchar PRIMARY KEY NOT NULL, + "name" text NOT NULL, + "type" varchar, + "config" text, + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "workspaceId" TEXT + ); + `) + + // step 2 - migrate data + await queryRunner.query(` + INSERT INTO "temp_evaluator" ("id", "name", "type", "config", "createdDate", "updatedDate", "workspaceId") + SELECT "id", "name", "type", "config", "createdDate", "updatedDate", "workspaceId" FROM "evaluator"; + `) + + // step 3 - drop evaluator table + await queryRunner.query(`DROP TABLE "evaluator";`) + + // step 4 - alter temp_evaluator to evaluator table + await queryRunner.query(`ALTER TABLE "temp_evaluator" RENAME TO "evaluator";`) + + // step 1 - create temp table without workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_dataset" ( + "id" varchar PRIMARY KEY NOT NULL, + "name" text NOT NULL, + "description" varchar, + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "workspaceId" TEXT + ); + `) + + // step 2 - migrate data + await queryRunner.query(` + INSERT INTO "temp_dataset" ("id", "name", "description", "createdDate", "updatedDate", "workspaceId") + SELECT "id", "name", "description",
"createdDate", "updatedDate", "workspaceId" FROM "dataset"; + `) + + // step 3 - drop dataset table + await queryRunner.query(`DROP TABLE "dataset";`) + + // step 4 - alter temp_dataset to dataset table + await queryRunner.query(`ALTER TABLE "temp_dataset" RENAME TO "dataset";`) + + // step 1 - create temp table without workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_variable" ( + "id" varchar PRIMARY KEY NOT NULL, + "name" text NOT NULL, + "value" text NOT NULL, + "type" varchar, + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "workspaceId" TEXT + ); + `) + + // step 2 - migrate data + await queryRunner.query(` + INSERT INTO "temp_variable" ("id", "name", "value", "type", "createdDate", "updatedDate", "workspaceId") + SELECT "id", "name", "value", "type", "createdDate", "updatedDate", "workspaceId" FROM "variable"; + `) + + // step 3 - drop variable table + await queryRunner.query(`DROP TABLE "variable";`) + + // step 4 - alter temp_variable to variable table + await queryRunner.query(`ALTER TABLE "temp_variable" RENAME TO "variable";`) + + // step 1 - create temp table without workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_workspace_shared" ( + "id" varchar PRIMARY KEY NOT NULL, + "workspaceId" varchar NOT NULL, + "sharedItemId" varchar NOT NULL, + "itemType" varchar NOT NULL, + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')) + ); + `) + + // step 2 - migrate data + await queryRunner.query(` + INSERT INTO "temp_workspace_shared" ("id", "workspaceId", "sharedItemId", "itemType", "createdDate", "updatedDate") + SELECT "id", "workspaceId", "sharedItemId", "itemType", "createdDate", "updatedDate" FROM "workspace_shared"; + `) + + // step 3 - drop workspace_shared table + await queryRunner.query(`DROP TABLE "workspace_shared";`) + + // step 4 - alter temp_workspace_shared to workspace_shared table + await queryRunner.query(`ALTER TABLE "temp_workspace_shared" RENAME TO "workspace_shared";`) + + // step 1 - create temp table without workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_custom_template" ( + "id" varchar PRIMARY KEY NOT NULL, + "name" varchar NOT NULL, + "flowData" text NOT NULL, + "description" varchar, + "badge" varchar, + "framework" varchar, + "usecases" varchar, + "type" varchar, + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "workspaceId" TEXT + ); + `) + + // step 2 - migrate data + await queryRunner.query(` + INSERT INTO "temp_custom_template" ("id", "name", "flowData", "description", "badge", "framework", "usecases", "type", "updatedDate", "createdDate", "workspaceId") + SELECT "id", "name", "flowData", "description", "badge", "framework", "usecases", "type", "updatedDate", "createdDate", "workspaceId" FROM "custom_template"; + `) + + // step 3 - drop custom_template table + await queryRunner.query(`DROP TABLE "custom_template";`) + + // step 4 - alter temp_custom_template to custom_template table + await queryRunner.query(`ALTER TABLE "temp_custom_template" RENAME TO "custom_template";`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/sqlite/1729133111652-LinkOrganizationId.ts b/packages/server/src/enterprise/database/migrations/sqlite/1729133111652-LinkOrganizationId.ts new file mode 100644 index 00000000000..c73e78cfa00 --- /dev/null +++ 
b/packages/server/src/enterprise/database/migrations/sqlite/1729133111652-LinkOrganizationId.ts @@ -0,0 +1,61 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' + +export class LinkOrganizationId1729133111652 implements MigrationInterface { + name = 'LinkOrganizationId1729133111652' + + public async up(queryRunner: QueryRunner): Promise { + // step 1 - create temp table with organizationId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_workspace" ( + "id" varchar PRIMARY KEY NOT NULL, + "name" text NOT NULL, + "description" varchar, + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "organizationId" varchar, + FOREIGN KEY ("organizationId") REFERENCES "organization"("id") + ); + `) + + // step 2 - create index for organizationId in temp_workspace table + await queryRunner.query(`CREATE INDEX IF NOT EXISTS "idx_workspace_organizationId" ON "temp_workspace"("organizationId");`) + + // step 3 - migrate data + await queryRunner.query(` + INSERT INTO "temp_workspace" ("id", "name", "description", "createdDate", "updatedDate", "organizationId") + SELECT "id", "name", "description", "createdDate", "updatedDate", "organizationId" FROM "workspace"; + `) + + // step 4 - drop workspace table + await queryRunner.query(`DROP TABLE "workspace";`) + + // step 5 - alter temp_workspace to workspace table + await queryRunner.query(`ALTER TABLE "temp_workspace" RENAME TO "workspace";`) + } + + public async down(queryRunner: QueryRunner): Promise { + // step 1 - create temp table without organizationId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_workspace" ( + "id" varchar PRIMARY KEY NOT NULL, + "name" text NOT NULL, + "description" varchar, + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "organizationId" varchar + ); + `) + + // step 2 - migrate data + await queryRunner.query(` + INSERT INTO "temp_workspace" ("id", "name", "description", "createdDate", "updatedDate", "organizationId") + SELECT "id", "name", "description", "createdDate", "updatedDate", "organizationId" FROM "workspace"; + `) + + // step 3 - drop workspace table + await queryRunner.query(`DROP TABLE "workspace";`) + + // step 4 - alter temp_workspace to workspace table + await queryRunner.query(`ALTER TABLE "temp_workspace" RENAME TO "workspace";`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/sqlite/1730519457880-AddSSOColumns.ts b/packages/server/src/enterprise/database/migrations/sqlite/1730519457880-AddSSOColumns.ts new file mode 100644 index 00000000000..da61bb87c9c --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/sqlite/1730519457880-AddSSOColumns.ts @@ -0,0 +1,16 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { ensureColumnExists } from './sqlliteCustomFunctions' + +export class AddSSOColumns1730519457880 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await ensureColumnExists(queryRunner, 'organization', 'sso_config', 'text') + await ensureColumnExists(queryRunner, 'user', 'user_type', 'varchar') + await ensureColumnExists(queryRunner, 'login_activity', 'login_mode', 'varchar') + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE "organization" DROP COLUMN "sso_config";`) + await queryRunner.query(`ALTER TABLE "user" DROP COLUMN "user_type";`) + await queryRunner.query(`ALTER TABLE
"login_activity" DROP COLUMN "login_mode";`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/sqlite/1734074497540-AddPersonalWorkspace.ts b/packages/server/src/enterprise/database/migrations/sqlite/1734074497540-AddPersonalWorkspace.ts new file mode 100644 index 00000000000..8e00d71b757 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/sqlite/1734074497540-AddPersonalWorkspace.ts @@ -0,0 +1,28 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { v4 as uuidv4 } from 'uuid' + +export class AddPersonalWorkspace1734074497540 implements MigrationInterface { + name = 'AddPersonalWorkspace1734074497540' + + public async up(queryRunner: QueryRunner): Promise { + const users = await queryRunner.query(`select * from "user";`) + const organization = await queryRunner.query(`select "id" from "organization";`) + for (let user of users) { + const workspaceDescription = 'Personal Workspace of ' + user.id + const workspaceId = uuidv4() + + await queryRunner.query(` + insert into "workspace" ("id", "name", "description", "organizationId") + values('${workspaceId}', 'Personal Workspace', '${workspaceDescription}', '${organization[0].id}'); + `) + + const workspaceusersId = uuidv4() + await queryRunner.query(` + insert into "workspace_users" ("id", "workspaceId", "userId", "role") + values('${workspaceusersId}', '${workspaceId}', '${user.id}', 'pw'); + `) + } + } + + public async down(): Promise {} +} diff --git a/packages/server/src/enterprise/database/migrations/sqlite/1737076223692-RefactorEnterpriseDatabase.ts b/packages/server/src/enterprise/database/migrations/sqlite/1737076223692-RefactorEnterpriseDatabase.ts new file mode 100644 index 00000000000..0ace2040cc1 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/sqlite/1737076223692-RefactorEnterpriseDatabase.ts @@ -0,0 +1,476 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { fixOpenSourceAssistantTable } from '../../../../database/migrations/sqlite/1743758056188-FixOpenSourceAssistantTable' +import { decrypt, encrypt } from '../../../utils/encryption.util' +import { LoginMethodStatus } from '../../entities/login-method.entity' +import { OrganizationUserStatus } from '../../entities/organization-user.entity' +import { OrganizationName } from '../../entities/organization.entity' +import { GeneralRole } from '../../entities/role.entity' +import { UserStatus } from '../../entities/user.entity' +import { WorkspaceUserStatus } from '../../entities/workspace-user.entity' +import { WorkspaceName } from '../../entities/workspace.entity' +import { linkWorkspaceId } from './1729130948686-LinkWorkspaceId' + +export class RefactorEnterpriseDatabase1737076223692 implements MigrationInterface { + name = 'RefactorEnterpriseDatabase1737076223692' + + private async modifyTable(queryRunner: QueryRunner): Promise { + /*------------------------------------- + --------------- user ----------------- + --------------------------------------*/ + // rename user table to temp_user + await queryRunner.query(`alter table "user" rename to "temp_user";`) + + // create user table + await queryRunner.query(` + create table "user" ( + "id" uuid default (lower(substr(hex(randomblob(16)), 1, 8) || '-' || substr(hex(randomblob(16)), 9, 4) || '-' || substr('1' || substr(hex(randomblob(16)), 9, 3), 1, 4) || '-' || substr('8' || substr(hex(randomblob(16)), 13, 3), 1, 4) || '-' || substr(hex(randomblob(16)), 17, 12))) primary key, + "name" varchar(100) not null, + "email" varchar(255) not 
null unique, + "credential" text null, + "tempToken" text null, + "tokenExpiry" timestamp null, + "status" varchar(20) default '${UserStatus.UNVERIFIED}' not null, + "createdDate" timestamp default current_timestamp not null, + "updatedDate" timestamp default current_timestamp not null, + "createdBy" uuid not null, + "updatedBy" uuid not null, + constraint "fk_createdBy" foreign key ("createdBy") references "user" ("id"), + constraint "fk_updatedBy" foreign key ("updatedBy") references "user" ("id") + ); + `) + + /*------------------------------------- + ----------- organization -------------- + --------------------------------------*/ + // rename organization table to temp_organization + await queryRunner.query(`alter table "organization" rename to "temp_organization";`) + + // create organization table + await queryRunner.query(` + create table "organization" ( + "id" uuid default (lower(substr(hex(randomblob(16)), 1, 8) || '-' || substr(hex(randomblob(16)), 9, 4) || '-' || substr('1' || substr(hex(randomblob(16)), 9, 3), 1, 4) || '-' || substr('8' || substr(hex(randomblob(16)), 13, 3), 1, 4) || '-' || substr(hex(randomblob(16)), 17, 12))) primary key, + "name" varchar(100) default '${OrganizationName.DEFAULT_ORGANIZATION}' not null, + "customerId" varchar(100) null, + "subscriptionId" varchar(100) null, + "createdDate" timestamp default current_timestamp not null, + "updatedDate" timestamp default current_timestamp not null, + "createdBy" uuid not null, + "updatedBy" uuid not null, + constraint "fk_createdBy" foreign key ("createdBy") references "user" ("id"), + constraint "fk_updatedBy" foreign key ("updatedBy") references "user" ("id") + ); + `) + + /*------------------------------------- + ----------- login method -------------- + --------------------------------------*/ + // create login_method table + await queryRunner.query(` + create table "login_method" ( + "id" uuid default (lower(substr(hex(randomblob(16)), 1, 8) || '-' || substr(hex(randomblob(16)), 9, 4) || '-' || substr('1' || substr(hex(randomblob(16)), 9, 3), 1, 4) || '-' || substr('8' || substr(hex(randomblob(16)), 13, 3), 1, 4) || '-' || substr(hex(randomblob(16)), 17, 12))) primary key, + "organizationId" uuid null, + "name" varchar(100) not null, + "config" text not null, + "status" varchar(20) default '${LoginMethodStatus.ENABLE}' not null, + "createdDate" timestamp default current_timestamp not null, + "updatedDate" timestamp default current_timestamp not null, + "createdBy" uuid null, + "updatedBy" uuid null, + constraint "fk_organizationId" foreign key ("organizationId") references "organization" ("id"), + constraint "fk_createdBy" foreign key ("createdBy") references "user" ("id"), + constraint "fk_updatedBy" foreign key ("updatedBy") references "user" ("id") + ); + `) + + /*------------------------------------- + --------------- role ------------------ + --------------------------------------*/ + // rename roles table to temp_role + await queryRunner.query(`alter table "roles" rename to "temp_role";`) + + // create organization_login_method table + await queryRunner.query(` + create table "role" ( + "id" uuid default (lower(substr(hex(randomblob(16)), 1, 8) || '-' || substr(hex(randomblob(16)), 9, 4) || '-' || substr('1' || substr(hex(randomblob(16)), 9, 3), 1, 4) || '-' || substr('8' || substr(hex(randomblob(16)), 13, 3), 1, 4) || '-' || substr(hex(randomblob(16)), 17, 12))) primary key, + "organizationId" uuid null, + "name" varchar(100) not null, + "description" text null, + "permissions" text not null, + 
"createdDate" timestamp default current_timestamp not null, + "updatedDate" timestamp default current_timestamp not null, + "createdBy" uuid null, + "updatedBy" uuid null, + constraint "fk_organizationId" foreign key ("organizationId") references "organization" ("id"), + constraint "fk_createdBy" foreign key ("createdBy") references "user" ("id"), + constraint "fk_updatedBy" foreign key ("updatedBy") references "user" ("id") + ); + `) + + /*------------------------------------- + ---------- organization_user ---------- + --------------------------------------*/ + // create organization_user table + await queryRunner.query(` + create table "organization_user" ( + "organizationId" uuid not null, + "userId" uuid not null, + "roleId" uuid not null, + "status" varchar(20) default '${OrganizationUserStatus.ACTIVE}' not null, + "createdDate" timestamp default current_timestamp not null, + "updatedDate" timestamp default current_timestamp not null, + "createdBy" uuid not null, + "updatedBy" uuid not null, + constraint "pk_organization_user" primary key ("organizationId", "userId"), + constraint "fk_organizationId" foreign key ("organizationId") references "organization" ("id"), + constraint "fk_userId" foreign key ("userId") references "user" ("id"), + constraint "fk_roleId" foreign key ("roleId") references "role" ("id"), + constraint "fk_createdBy" foreign key ("createdBy") references "user" ("id"), + constraint "fk_updatedBy" foreign key ("updatedBy") references "user" ("id") + ); + `) + + /*------------------------------------- + ------------- workspace --------------- + --------------------------------------*/ + // rename workspace table to temp_workspace + await queryRunner.query(`alter table "workspace" rename to "temp_workspace";`) + + // create workspace table + await queryRunner.query(` + create table "workspace" ( + "id" uuid default (lower(substr(hex(randomblob(16)), 1, 8) || '-' || substr(hex(randomblob(16)), 9, 4) || '-' || substr('1' || substr(hex(randomblob(16)), 9, 3), 1, 4) || '-' || substr('8' || substr(hex(randomblob(16)), 13, 3), 1, 4) || '-' || substr(hex(randomblob(16)), 17, 12))) primary key, + "name" varchar(100) not null, + "description" text null, + "createdDate" timestamp default current_timestamp not null, + "updatedDate" timestamp default current_timestamp not null, + "organizationId" uuid not null, + "createdBy" uuid not null, + "updatedBy" uuid not null, + constraint "fk_organizationId" foreign key ("organizationId") references "organization" ("id"), + constraint "fk_createdBy" foreign key ("createdBy") references "user" ("id"), + constraint "fk_updatedBy" foreign key ("updatedBy") references "user" ("id") + ); + `) + + /*------------------------------------- + ----------- workspace_user ------------ + --------------------------------------*/ + // rename workspace_users table to temp_workspace_user + await queryRunner.query(`alter table "workspace_users" rename to "temp_workspace_user";`) + + // create workspace_user table + await queryRunner.query(` + create table "workspace_user" ( + "workspaceId" uuid not null, + "userId" uuid not null, + "roleId" uuid not null, + "status" varchar(20) default '${WorkspaceUserStatus.INVITED}' not null, + "lastLogin" timestamp null, + "createdDate" timestamp default current_timestamp not null, + "updatedDate" timestamp default current_timestamp not null, + "createdBy" uuid not null, + "updatedBy" uuid not null, + constraint "pk_workspace_user" primary key ("workspaceId", "userId"), + constraint "fk_workspaceId" foreign key 
("workspaceId") references "workspace" ("id"), + constraint "fk_userId" foreign key ("userId") references "user" ("id"), + constraint "fk_roleId" foreign key ("roleId") references "role" ("id"), + constraint "fk_createdBy" foreign key ("createdBy") references "user" ("id"), + constraint "fk_updatedBy" foreign key ("updatedBy") references "user" ("id") + ); + `) + } + + private async deleteWorkspaceWithoutUser(queryRunner: QueryRunner) { + const workspaceWithoutUser = await queryRunner.query(` + select w."id" as "id" from "workspace_user" as "wu" + right join "workspace" as "w" on "wu"."workspaceId" = "w"."id" + where "wu"."userId" is null; + `) + const workspaceIds = workspaceWithoutUser.map((workspace: { id: string }) => `'${workspace.id}'`).join(',') + + // Delete related records from other tables that reference the deleted workspaces + if (workspaceIds && workspaceIds.length > 0) { + await queryRunner.query(` + delete from "workspace_user" where "workspaceId" in (${workspaceIds}); + `) + await queryRunner.query(` + delete from "apikey" where "workspaceId" in (${workspaceIds}); + `) + await queryRunner.query(` + delete from "assistant" where "workspaceId" in (${workspaceIds}); + `) + const chatflows = await queryRunner.query(` + select id from "chat_flow" where "workspaceId" in (${workspaceIds}); + `) + const chatflowIds = chatflows.map((chatflow: { id: string }) => `'${chatflow.id}'`).join(',') + if (chatflowIds && chatflowIds.length > 0) { + await queryRunner.query(` + delete from "chat_flow" where "workspaceId" in (${workspaceIds}); + `) + await queryRunner.query(` + delete from "upsert_history" where "chatflowid" in (${chatflowIds}); + `) + await queryRunner.query(` + delete from "chat_message" where "chatflowid" in (${chatflowIds}); + `) + await queryRunner.query(` + delete from "chat_message_feedback" where "chatflowid" in (${chatflowIds}); + `) + } + await queryRunner.query(` + delete from "credential" where "workspaceId" in (${workspaceIds}); + `) + await queryRunner.query(` + delete from "custom_template" where "workspaceId" in (${workspaceIds}); + `) + const datasets = await queryRunner.query(` + select id from "dataset" where "workspaceId" in (${workspaceIds}); + `) + const datasetIds = datasets.map((dataset: { id: string }) => `'${dataset.id}'`).join(',') + if (datasetIds && datasetIds.length > 0) { + await queryRunner.query(` + delete from "dataset" where "workspaceId" in (${workspaceIds}); + `) + await queryRunner.query(` + delete from "dataset_row" where "datasetId" in (${datasetIds}); + `) + } + const documentStores = await queryRunner.query(` + select id from "document_store" where "workspaceId" in (${workspaceIds}); + `) + const documentStoreIds = documentStores.map((documentStore: { id: string }) => `'${documentStore.id}'`).join(',') + if (documentStoreIds && documentStoreIds.length > 0) { + await queryRunner.query(` + delete from "document_store" where "workspaceId" in (${workspaceIds}); + `) + await queryRunner.query(` + delete from "document_store_file_chunk" where "storeId" in (${documentStoreIds}); + `) + } + const evaluations = await queryRunner.query(` + select id from "evaluation" where "workspaceId" in (${workspaceIds}); + `) + const evaluationIds = evaluations.map((evaluation: { id: string }) => `'${evaluation.id}'`).join(',') + if (evaluationIds && evaluationIds.length > 0) { + await queryRunner.query(` + delete from "evaluation" where "workspaceId" in (${workspaceIds}); + `) + await queryRunner.query(` + delete from "evaluation_run" where "evaluationId" in 
(${evaluationIds}); + `) + } + await queryRunner.query(` + delete from "evaluator" where "workspaceId" in (${workspaceIds}); + `) + await queryRunner.query(` + delete from "tool" where "workspaceId" in (${workspaceIds}); + `) + await queryRunner.query(` + delete from "variable" where "workspaceId" in (${workspaceIds}); + `) + await queryRunner.query(` + delete from "workspace_shared" where "workspaceId" in (${workspaceIds}); + `) + await queryRunner.query(` + delete from "workspace" where "id" in (${workspaceIds}); + `) + } + } + + private async populateTable(queryRunner: QueryRunner): Promise { + // insert generalRole + const generalRole = [ + { + name: 'owner', + description: 'Has full control over the organization.', + permissions: '["organization","workspace"]' + }, + { + name: 'member', + description: 'Has limited control over the organization.', + permissions: '[]' + }, + { + name: 'personal workspace', + description: 'Has full control over the personal workspace', + permissions: + '[ "chatflows:view", "chatflows:create", "chatflows:update", "chatflows:duplicate", "chatflows:delete", "chatflows:export", "chatflows:import", "chatflows:config", "chatflows:domains", "agentflows:view", "agentflows:create", "agentflows:update", "agentflows:duplicate", "agentflows:delete", "agentflows:export", "agentflows:import", "agentflows:config", "agentflows:domains", "tools:view", "tools:create", "tools:update", "tools:delete", "tools:export", "assistants:view", "assistants:create", "assistants:update", "assistants:delete", "credentials:view", "credentials:create", "credentials:update", "credentials:delete", "credentials:share", "variables:view", "variables:create", "variables:update", "variables:delete", "apikeys:view", "apikeys:create", "apikeys:update", "apikeys:delete", "apikeys:import", "documentStores:view", "documentStores:create", "documentStores:update", "documentStores:delete", "documentStores:add-loader", "documentStores:delete-loader", "documentStores:preview-process", "documentStores:upsert-config", "datasets:view", "datasets:create", "datasets:update", "datasets:delete", "evaluators:view", "evaluators:create", "evaluators:update", "evaluators:delete", "evaluations:view", "evaluations:create", "evaluations:update", "evaluations:delete", "evaluations:run", "templates:marketplace", "templates:custom", "templates:custom-delete", "templates:toolexport", "templates:flowexport", "templates:custom-share", "workspace:export", "workspace:import", "executions:view", "executions:delete" ]' + } + ] + for (let role of generalRole) { + await queryRunner.query(` + insert into "role"("name", "description", "permissions") + values('${role.name}', '${role.description}', '${role.permissions}'); + `) + } + + const users = await queryRunner.query('select * from "temp_user";') + const noExistingData = users.length > 0 === false + if (noExistingData) return + + const organizations = await queryRunner.query('select * from "temp_organization";') + const organizationId = organizations[0].id + const adminUserId = organizations[0].adminUserId + const ssoConfig = organizations[0].sso_config ? 
JSON.parse(await decrypt(organizations[0].sso_config)).providers : [] + + /*------------------------------------- + --------------- user ----------------- + --------------------------------------*/ + // insert user with temp_user data + await queryRunner.query(` + insert into "user" ("id", "name", "email", "credential", "tempToken", "tokenExpiry", "status", "createdBy", "updatedBy") + select tu."id", coalesce(tu."name", tu."email"), tu."email", tu."credential", tu."tempToken", tu."tokenExpiry", tu."status", + '${adminUserId}', '${adminUserId}' + from "temp_user" as "tu"; + `) + + /*------------------------------------- + ----------- organization -------------- + --------------------------------------*/ + // insert organization with temp_organization data + await queryRunner.query(` + insert into "organization" ("id", "name", "createdBy", "updatedBy") + select "id", "name", "adminUserId", "adminUserId" from "temp_organization"; + `) + + /*------------------------------------- + ----------- login method -------------- + --------------------------------------*/ + // insert login_method with temp_organization data + for (let config of ssoConfig) { + const newConfigFormat = { + domain: config.domain === '' || config.domain === undefined ? undefined : config.domain, + tenantID: config.tenantID === '' || config.tenantID === undefined ? undefined : config.tenantID, + clientID: config.clientID === '' || config.clientID === undefined ? undefined : config.clientID, + clientSecret: config.clientSecret === '' || config.clientSecret === undefined ? undefined : config.clientSecret + } + const status = config.configEnabled === true ? LoginMethodStatus.ENABLE : LoginMethodStatus.DISABLE + + const allUndefined = Object.values(newConfigFormat).every((value) => value === undefined) + if (allUndefined && status === LoginMethodStatus.DISABLE) continue + const encryptData = await encrypt(JSON.stringify(newConfigFormat)) + + await queryRunner.query(` + insert into "login_method" ("organizationId", "name", "config", "status", "createdBy", "updatedBy") + values('${organizationId}','${config.providerName}','${encryptData}','${status}','${adminUserId}','${adminUserId}'); + `) + } + + /*------------------------------------- + --------------- role ------------------ + --------------------------------------*/ + // insert workspace role into role + const workspaceRole = await queryRunner.query(`select "id", "name", "description", "permissions" from "temp_role";`) + for (let role of workspaceRole) { + role.permissions = JSON.stringify(role.permissions.split(',').filter((permission: string) => permission.trim() !== '')) + const haveDescriptionQuery = `insert into "role" ("id", "organizationId", "name", "description", "permissions", "createdBy", "updatedBy") + values('${role.id}','${organizationId}','${role.name}','${role.description}','${role.permissions}','${adminUserId}','${adminUserId}');` + const noHaveDescriptionQuery = `insert into "role" ("id", "organizationId", "name", "permissions", "createdBy", "updatedBy") + values('${role.id}','${organizationId}','${role.name}','${role.permissions}','${adminUserId}','${adminUserId}');` + const insertRoleQuery = role.description ? 
haveDescriptionQuery : noHaveDescriptionQuery + await queryRunner.query(insertRoleQuery) + } + + /*------------------------------------- + ---------- organization_user ---------- + --------------------------------------*/ + const roles = await queryRunner.query('select * from "role";') + // insert organization_user with user, role and temp_organization data + for (let user of users) { + const roleId = + user.id === adminUserId + ? roles.find((role: any) => role.name === GeneralRole.OWNER).id + : roles.find((role: any) => role.name === GeneralRole.MEMBER).id + await queryRunner.query(` + insert into "organization_user" ("organizationId", "userId", "roleId", "status", "createdBy", "updatedBy") + values ('${organizationId}','${user.id}','${roleId}','${user.status}','${adminUserId}','${adminUserId}'); + `) + } + + /*------------------------------------- + ------------- workspace --------------- + --------------------------------------*/ + // for (let workspace of workspaces) { + // await queryRunner.query( + // `update "workspace" set "createdBy" = '${adminUserId}', "updatedBy" = '${adminUserId}' where "id" = '${workspace.id}';` + // ) + // } + + await queryRunner.query(` + insert into "workspace" ("id", "name", "description", "createdDate", "updatedDate", "organizationId", "createdBy", "updatedBy") + select "id", "name", "description", "createdDate", "updatedDate", "organizationId", '${adminUserId}', '${adminUserId}' from "temp_workspace"; + `) + + /*------------------------------------- + ----------- workspace_user ------------ + --------------------------------------*/ + const workspaces = await queryRunner.query('select * from "workspace";') + const workspaceUsers = await queryRunner.query('select * from "temp_workspace_user";') + for (let workspaceUser of workspaceUsers) { + switch (workspaceUser.role) { + case 'org_admin': + workspaceUser.role = roles.find((role: any) => role.name === GeneralRole.OWNER).id + break + case 'pw': + workspaceUser.role = roles.find((role: any) => role.name === GeneralRole.PERSONAL_WORKSPACE).id + break + default: + workspaceUser.role = roles.find((role: any) => role.name === workspaceUser.role).id + break + } + const user = users.find((user: any) => user.id === workspaceUser.userId) + const workspace = workspaces.find((workspace: any) => workspace.id === workspaceUser.workspaceId) + if (workspaceUser.workspaceId === user.activeWorkspaceId && user.lastLogin && user.status !== UserStatus.INVITED) { + const lastLogin = new Date(user.lastLogin).toISOString() + await queryRunner.query(` + insert into "workspace_user" ("workspaceId", "userId", "roleId", "status", "lastLogin","createdBy", "updatedBy") + values ('${workspaceUser.workspaceId}','${workspaceUser.userId}','${workspaceUser.role}','${WorkspaceUserStatus.ACTIVE}','${lastLogin}','${adminUserId}','${adminUserId}'); + `) + } else if (workspace.name === WorkspaceName.DEFAULT_PERSONAL_WORKSPACE && user.status === UserStatus.INVITED) { + // Skip personal workspaces for users who haven't signed up yet to avoid duplicates when they sign up. + // account.service.ts creates personal workspace during sign-up. 
+ await queryRunner.query(` + delete from "temp_workspace_user" where "workspaceId" = '${workspaceUser.workspaceId}' and "userId" = '${workspaceUser.userId}'; + `) + await queryRunner.query(` + delete from "workspace" where "id" = '${workspaceUser.workspaceId}'; + `) + } else { + await queryRunner.query(` + insert into "workspace_user" ("workspaceId", "userId", "roleId", "status","createdBy", "updatedBy") + values ('${workspaceUser.workspaceId}','${workspaceUser.userId}','${workspaceUser.role}','${WorkspaceUserStatus.INVITED}','${adminUserId}','${adminUserId}'); + `) + } + } + + await this.deleteWorkspaceWithoutUser(queryRunner) + } + + private async deleteTempTable(queryRunner: QueryRunner): Promise { + await queryRunner.query(` + drop table "temp_workspace_user"; + `) + await queryRunner.query(` + drop table "temp_role"; + `) + await queryRunner.query(` + drop table "temp_organization"; + `) + await queryRunner.query(` + drop table "temp_user"; + `) + await queryRunner.query(` + drop table "temp_workspace"; + `) + } + + public async up(queryRunner: QueryRunner): Promise { + await this.modifyTable(queryRunner) + await this.populateTable(queryRunner) + await this.deleteTempTable(queryRunner) + await linkWorkspaceId(queryRunner, false) + await fixOpenSourceAssistantTable(queryRunner) + } + + public async down(): Promise {} +} diff --git a/packages/server/src/enterprise/database/migrations/sqlite/1746862866554-ExecutionLinkWorkspaceId.ts b/packages/server/src/enterprise/database/migrations/sqlite/1746862866554-ExecutionLinkWorkspaceId.ts new file mode 100644 index 00000000000..4cba459de64 --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/sqlite/1746862866554-ExecutionLinkWorkspaceId.ts @@ -0,0 +1,73 @@ +import { MigrationInterface, QueryRunner } from 'typeorm' +import { ensureColumnExists } from './sqlliteCustomFunctions' + +export class ExecutionLinkWorkspaceId1746862866554 implements MigrationInterface { + public async up(queryRunner: QueryRunner): Promise { + await ensureColumnExists(queryRunner, 'execution', 'workspaceId', 'TEXT') + + // step 1 - create temp table with workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_execution" ( + "id" varchar PRIMARY KEY NOT NULL, + "executionData" text NOT NULL, + "action" text, + "state" varchar NOT NULL, + "agentflowId" varchar NOT NULL, + "sessionId" varchar NOT NULL, + "isPublic" boolean, + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "stoppedDate" datetime, + "workspaceId" varchar, + FOREIGN KEY ("workspaceId") REFERENCES "workspace"("id") + ); + `) + + // step 2 - create index for workspaceId in temp_execution table + await queryRunner.query(`CREATE INDEX IF NOT EXISTS "idx_execution_workspaceId" ON "temp_execution"("workspaceId");`) + + // step 3 - migrate data + await queryRunner.query(` + INSERT INTO "temp_execution" ("id", "executionData", "action", "state", "agentflowId", "sessionId", "isPublic", "createdDate", "updatedDate", "stoppedDate") + SELECT "id", "executionData", "action", "state", "agentflowId", "sessionId", "isPublic", "createdDate", "updatedDate", "stoppedDate" FROM "execution"; + `) + + // step 4 - drop execution table + await queryRunner.query(`DROP TABLE "execution";`) + + // step 5 - alter temp_execution to execution table + await queryRunner.query(`ALTER TABLE "temp_execution" RENAME TO "execution";`) + } + + public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query(`ALTER TABLE 
"execution" DROP COLUMN "workspaceId";`) + + // step 1 - create temp table without workspaceId as foreign key + await queryRunner.query(` + CREATE TABLE "temp_execution" ( + "id" varchar PRIMARY KEY NOT NULL, + "executionData" text NOT NULL, + "action" text, + "state" varchar NOT NULL, + "agentflowId" varchar NOT NULL, + "sessionId" varchar NOT NULL, + "isPublic" boolean, + "createdDate" datetime NOT NULL DEFAULT (datetime('now')), + "updatedDate" datetime NOT NULL DEFAULT (datetime('now')), + "stoppedDate" datetime + ); + `) + + // step 2 - migrate data + await queryRunner.query(` + INSERT INTO "temp_execution" ("id", "executionData", "action", "state", "agentflowId", "sessionId", "isPublic", "createdDate", "updatedDate", "stoppedDate") + SELECT "id", "executionData", "action", "state", "agentflowId", "sessionId", "isPublic", "createdDate", "updatedDate", "stoppedDate" FROM "execution"; + `) + + // step 3 - drop execution table + await queryRunner.query(`DROP TABLE "execution";`) + + // step 4 - alter temp_execution to execution table + await queryRunner.query(`ALTER TABLE "temp_execution" RENAME TO "execution";`) + } +} diff --git a/packages/server/src/enterprise/database/migrations/sqlite/sqlliteCustomFunctions.ts b/packages/server/src/enterprise/database/migrations/sqlite/sqlliteCustomFunctions.ts new file mode 100644 index 00000000000..b21546f6ffc --- /dev/null +++ b/packages/server/src/enterprise/database/migrations/sqlite/sqlliteCustomFunctions.ts @@ -0,0 +1,20 @@ +import { QueryRunner } from 'typeorm' + +export const ensureColumnExists = async ( + queryRunner: QueryRunner, + tableName: string, + columnName: string, + columnType: string // Accept column type as a parameter +): Promise => { + // Retrieve column information from the specified table + const columns = await queryRunner.query(`PRAGMA table_info(${tableName});`) + + // Check if the specified column exists + const columnExists = columns.some((col: any) => col.name === columnName) + + // Check if the specified column exists in the returned columns + if (!columnExists) { + // Add the column if it does not exist + await queryRunner.query(`ALTER TABLE ${tableName} ADD COLUMN ${columnName} ${columnType};`) + } +} diff --git a/packages/server/src/enterprise/emails/verify_email_cloud.hbs b/packages/server/src/enterprise/emails/verify_email_cloud.hbs new file mode 100644 index 00000000000..5d7bd27976f --- /dev/null +++ b/packages/server/src/enterprise/emails/verify_email_cloud.hbs @@ -0,0 +1,1157 @@ + + + + FlowiseAI + + + + + + + + + + + + + + + + + + + + + +
+[Table-based HTML email template. Heading: "Please confirm your email". Body: "Hi there! 👋, Welcome to FlowiseAI. To complete your registration, we need to verify your email address." CTA button: "Verify Email Address". Sign-off: "The FlowiseAI Team".]
\ No newline at end of file
diff --git a/packages/server/src/enterprise/emails/verify_email_cloud.html b/packages/server/src/enterprise/emails/verify_email_cloud.html
new file mode 100644
index 00000000000..2a89b2af87f
--- /dev/null
+++ b/packages/server/src/enterprise/emails/verify_email_cloud.html
@@ -0,0 +1,1274 @@
+[Plain-HTML counterpart of verify_email_cloud.hbs with the same copy and layout.]
diff --git a/packages/server/src/enterprise/emails/workspace_add_cloud.hbs b/packages/server/src/enterprise/emails/workspace_add_cloud.hbs
new file mode 100644
index 00000000000..d22180b5f76
--- /dev/null
+++ b/packages/server/src/enterprise/emails/workspace_add_cloud.hbs
@@ -0,0 +1,1165 @@
+[Table-based HTML email template. Heading: "You've been added to the {{workspaceName}} workspace." Body: "An administrator added you to their {{workspaceName}} workspace." followed by a short "To get started" list (click the button to go to your FlowiseAI dashboard for immediate access to the workspace). CTA button: "Go to dashboard". Sign-off: "The FlowiseAI Team".]
\ No newline at end of file
diff --git a/packages/server/src/enterprise/emails/workspace_add_cloud.html b/packages/server/src/enterprise/emails/workspace_add_cloud.html
new file mode 100644
index 00000000000..f39c7bd30e1
--- /dev/null
+++ b/packages/server/src/enterprise/emails/workspace_add_cloud.html
@@ -0,0 +1,1279 @@
+[Plain-HTML counterpart of workspace_add_cloud.hbs with the same copy and layout.]
diff --git a/packages/server/src/enterprise/emails/workspace_new_invite_cloud.hbs b/packages/server/src/enterprise/emails/workspace_new_invite_cloud.hbs
new file mode 100644
index 00000000000..5bf80b55334
--- /dev/null
+++ b/packages/server/src/enterprise/emails/workspace_new_invite_cloud.hbs
@@ -0,0 +1,1163 @@
+[Table-based HTML email template. Heading: "You've been invited to the {{workspaceName}} workspace." Body: "An administrator invited you to join their {{workspaceName}} workspace." followed by a "To get started" list (click the button to create an account for immediate access to the workspace). CTA button: "Sign up for free". Sign-off: "The FlowiseAI Team".]
\ No newline at end of file
diff --git a/packages/server/src/enterprise/emails/workspace_new_invite_cloud.html b/packages/server/src/enterprise/emails/workspace_new_invite_cloud.html
new file mode 100644
index 00000000000..9c88d80e9c4
--- /dev/null
+++ b/packages/server/src/enterprise/emails/workspace_new_invite_cloud.html
@@ -0,0 +1,1277 @@
+[Plain-HTML counterpart of workspace_new_invite_cloud.hbs with the same copy and layout.]
diff --git a/packages/server/src/enterprise/emails/workspace_new_invite_enterprise.hbs b/packages/server/src/enterprise/emails/workspace_new_invite_enterprise.hbs
new file mode 100644
index 00000000000..31b710df6a9
--- /dev/null
+++ b/packages/server/src/enterprise/emails/workspace_new_invite_enterprise.hbs
@@ -0,0 +1,875 @@
+[Table-based HTML email template. Heading: "You've been invited to the {{workspaceName}} workspace in your organization." Body: "An administrator invited you to join the {{workspaceName}} workspace." followed by a "To get started" list (visit the login page, sign in with your organization's SSO account or use email and password, get immediate access to the workspace). CTA button: "Accept Invite". Sign-off: "The FlowiseAI Team".]
\ No newline at end of file
diff --git a/packages/server/src/enterprise/emails/workspace_new_invite_enterprise.html b/packages/server/src/enterprise/emails/workspace_new_invite_enterprise.html
new file mode 100644
index 00000000000..023770fc135
--- /dev/null
+++ b/packages/server/src/enterprise/emails/workspace_new_invite_enterprise.html
@@ -0,0 +1,1282 @@
+[Plain-HTML counterpart of workspace_new_invite_enterprise.hbs with the same copy and layout.]
diff --git a/packages/server/src/enterprise/emails/workspace_update_invite_cloud.hbs b/packages/server/src/enterprise/emails/workspace_update_invite_cloud.hbs
new file mode 100644
index 00000000000..1bb83eaa823
--- /dev/null
+++ b/packages/server/src/enterprise/emails/workspace_update_invite_cloud.hbs
@@ -0,0 +1,1162 @@
+[Table-based HTML email template. Heading: "You've been invited to the {{workspaceName}} workspace." Body: "Your invitation has been updated. The administrator has modified your invitation details. Your previous invite link is no longer valid." followed by a "To get started" list (click the button to create an account for immediate access to the workspace). CTA button: "Sign up for free". Sign-off: "The FlowiseAI Team".]
\ No newline at end of file
diff --git a/packages/server/src/enterprise/emails/workspace_update_invite_cloud.html b/packages/server/src/enterprise/emails/workspace_update_invite_cloud.html
new file mode 100644
index 00000000000..066d68a7302
--- /dev/null
+++ b/packages/server/src/enterprise/emails/workspace_update_invite_cloud.html
@@ -0,0 +1,1278 @@
+[Plain-HTML counterpart of workspace_update_invite_cloud.hbs with the same copy and layout.]
diff --git a/packages/server/src/enterprise/emails/workspace_update_invite_enterprise.hbs b/packages/server/src/enterprise/emails/workspace_update_invite_enterprise.hbs
new file mode 100644
index 00000000000..b81f6016d49
--- /dev/null
+++ b/packages/server/src/enterprise/emails/workspace_update_invite_enterprise.hbs
@@ -0,0 +1,1166 @@
+[Table-based HTML email template. Heading: "You've been invited to the {{workspaceName}} workspace in your organization." Body: "The administrator has modified your invitation details. Your previous invite link is no longer valid." followed by a "To get started" list (visit the login page, sign in with your organization's SSO account or use email and password, get immediate access to the workspace). CTA button: "Accept Invite". Sign-off: "The FlowiseAI Team".]
\ No newline at end of file
diff --git a/packages/server/src/enterprise/emails/workspace_update_invite_enterprise.html b/packages/server/src/enterprise/emails/workspace_update_invite_enterprise.html
new file mode 100644
index 00000000000..c06a732f997
--- /dev/null
+++ b/packages/server/src/enterprise/emails/workspace_update_invite_enterprise.html
@@ -0,0 +1,1282 @@
+[Plain-HTML counterpart of workspace_update_invite_enterprise.hbs with the same copy and layout.]
diff --git a/packages/server/src/enterprise/emails/workspace_user_reset_password.hbs b/packages/server/src/enterprise/emails/workspace_user_reset_password.hbs
new file mode 100644
index 00000000000..33c25dc4f1a
--- /dev/null
+++ b/packages/server/src/enterprise/emails/workspace_user_reset_password.hbs
@@ -0,0 +1,877 @@
+[Table-based HTML email template. Heading: "Reset your FlowiseAI password". Body: "We received a request to reset the password for your FlowiseAI account. If you didn't make the request, you can safely ignore this email." followed by instructions to visit {{resetLink}} (or click the button) and choose a new password. CTA button: "Reset Password". Sign-off: "The FlowiseAI Team".]
\ No newline at end of file
diff --git a/packages/server/src/enterprise/emails/workspace_user_reset_password.html b/packages/server/src/enterprise/emails/workspace_user_reset_password.html
new file mode 100644
index 00000000000..30e31459a0f
--- /dev/null
+++ b/packages/server/src/enterprise/emails/workspace_user_reset_password.html
@@ -0,0 +1,1282 @@
+[Plain-HTML counterpart of workspace_user_reset_password.hbs with the same copy and layout.]
+ + diff --git a/packages/server/src/enterprise/middleware/passport/AuthStrategy.ts b/packages/server/src/enterprise/middleware/passport/AuthStrategy.ts new file mode 100644 index 00000000000..eaadbfa5f1d --- /dev/null +++ b/packages/server/src/enterprise/middleware/passport/AuthStrategy.ts @@ -0,0 +1,44 @@ +import { JwtFromRequestFunction, Strategy as JwtStrategy, VerifiedCallback } from 'passport-jwt' +import { decryptToken } from '../../utils/tempTokenUtils' +import { Strategy } from 'passport' +import { Request } from 'express' +import { ICommonObject } from 'flowise-components' + +const _cookieExtractor = (req: any) => { + let jwt = null + + if (req && req.cookies) { + jwt = req.cookies['token'] + } + + return jwt +} + +export const getAuthStrategy = (options: any): Strategy => { + let jwtFromRequest: JwtFromRequestFunction + jwtFromRequest = _cookieExtractor + const jwtOptions = { + jwtFromRequest: jwtFromRequest, + passReqToCallback: true, + ...options + } + const jwtVerify = async (req: Request, payload: ICommonObject, done: VerifiedCallback) => { + try { + if (!req.user) { + return done(null, false, 'Unauthorized.') + } + const meta = decryptToken(payload.meta) + if (!meta) { + return done(null, false, 'Unauthorized.') + } + const ids = meta.split(':') + if (ids.length !== 2 || req.user.id !== ids[0]) { + return done(null, false, 'Unauthorized.') + } + done(null, req.user) + } catch (error) { + done(error, false) + } + } + return new JwtStrategy(jwtOptions, jwtVerify) +} diff --git a/packages/server/src/enterprise/middleware/passport/SessionPersistance.ts b/packages/server/src/enterprise/middleware/passport/SessionPersistance.ts new file mode 100644 index 00000000000..bd21dbae730 --- /dev/null +++ b/packages/server/src/enterprise/middleware/passport/SessionPersistance.ts @@ -0,0 +1,93 @@ +import Redis from 'ioredis' +import { RedisStore } from 'connect-redis' +import { getDatabaseSSLFromEnv } from '../../../DataSource' +import path from 'path' +import { getUserHome } from '../../../utils' + +let redisClient: Redis | null = null +let redisStore: RedisStore | null = null + +export const initializeRedisClientAndStore = (): RedisStore => { + if (!redisClient) { + if (process.env.REDIS_URL) { + redisClient = new Redis(process.env.REDIS_URL) + } else { + redisClient = new Redis({ + host: process.env.REDIS_HOST || 'localhost', + port: parseInt(process.env.REDIS_PORT || '6379'), + username: process.env.REDIS_USERNAME || undefined, + password: process.env.REDIS_PASSWORD || undefined, + tls: + process.env.REDIS_TLS === 'true' + ? { + cert: process.env.REDIS_CERT ? Buffer.from(process.env.REDIS_CERT, 'base64') : undefined, + key: process.env.REDIS_KEY ? Buffer.from(process.env.REDIS_KEY, 'base64') : undefined, + ca: process.env.REDIS_CA ? 
Buffer.from(process.env.REDIS_CA, 'base64') : undefined + } + : undefined + }) + } + } + if (!redisStore) { + redisStore = new RedisStore({ client: redisClient }) + } + return redisStore +} + +export const initializeDBClientAndStore: any = () => { + const databaseType = process.env.DATABASE_TYPE || 'sqlite' + switch (databaseType) { + case 'mysql': { + const expressSession = require('express-session') + const MySQLStore = require('express-mysql-session')(expressSession) + const options = { + host: process.env.DATABASE_HOST, + port: parseInt(process.env.DATABASE_PORT || '3306'), + user: process.env.DATABASE_USER, + password: process.env.DATABASE_PASSWORD, + database: process.env.DATABASE_NAME, + createDatabaseTable: true, + schema: { + tableName: 'login_sessions' + } + } + return new MySQLStore(options) + } + case 'mariadb': + /* TODO: Implement MariaDB session store */ + break + case 'postgres': { + // default is postgres + const pg = require('pg') + const expressSession = require('express-session') + const pgSession = require('connect-pg-simple')(expressSession) + + const pgPool = new pg.Pool({ + host: process.env.DATABASE_HOST, + port: parseInt(process.env.DATABASE_PORT || '5432'), + user: process.env.DATABASE_USER, + password: process.env.DATABASE_PASSWORD, + database: process.env.DATABASE_NAME, + ssl: getDatabaseSSLFromEnv() + }) + return new pgSession({ + pool: pgPool, // Connection pool + tableName: 'login_sessions', + schemaName: 'public', + createTableIfMissing: true + }) + } + case 'default': + case 'sqlite': { + const expressSession = require('express-session') + const sqlSession = require('connect-sqlite3')(expressSession) + let flowisePath = path.join(getUserHome(), '.flowise') + const homePath = process.env.DATABASE_PATH ?? flowisePath + return new sqlSession({ + db: 'database.sqlite', + table: 'login_sessions', + dir: homePath + }) + } + } +} diff --git a/packages/server/src/enterprise/middleware/passport/index.ts b/packages/server/src/enterprise/middleware/passport/index.ts new file mode 100644 index 00000000000..cdbdeb2b954 --- /dev/null +++ b/packages/server/src/enterprise/middleware/passport/index.ts @@ -0,0 +1,403 @@ +import passport from 'passport' +import { VerifiedCallback } from 'passport-jwt' +import express, { NextFunction, Request, Response } from 'express' +import { ErrorMessage, IAssignedWorkspace, LoggedInUser } from '../../Interface.Enterprise' +import { decryptToken, encryptToken, generateSafeCopy } from '../../utils/tempTokenUtils' +import jwt, { JwtPayload, sign } from 'jsonwebtoken' +import { getAuthStrategy } from './AuthStrategy' +import { IdentityManager } from '../../../IdentityManager' +import { HttpStatusCode } from 'axios' +import { getRunningExpressApp } from '../../../utils/getRunningExpressApp' +import session from 'express-session' +import { OrganizationService } from '../../services/organization.service' +import { AccountService } from '../../services/account.service' +import { WorkspaceUser, WorkspaceUserStatus } from '../../database/entities/workspace-user.entity' +import { RoleErrorMessage, RoleService } from '../../services/role.service' +import { GeneralRole } from '../../database/entities/role.entity' +import { RedisStore } from 'connect-redis' +import { WorkspaceUserService } from '../../services/workspace-user.service' +import { OrganizationUserErrorMessage, OrganizationUserService } from '../../services/organization-user.service' +import { InternalFlowiseError } from '../../../errors/internalFlowiseError' +import { StatusCodes } from 
'http-status-codes' +import { OrganizationUserStatus } from '../../database/entities/organization-user.entity' +import { Platform } from '../../../Interface' +import { initializeDBClientAndStore, initializeRedisClientAndStore } from './SessionPersistance' + +const localStrategy = require('passport-local').Strategy + +const jwtAudience = process.env.JWT_AUDIENCE ?? 'AUDIENCE' +const jwtIssuer = process.env.JWT_ISSUER ?? 'ISSUER' + +const expireAuthTokensOnRestart = process.env.EXPIRE_AUTH_TOKENS_ON_RESTART === 'true' +const jwtAuthTokenSecret = process.env.JWT_AUTH_TOKEN_SECRET ?? 'auth_token' +const jwtRefreshSecret = process.env.JWT_REFRESH_TOKEN_SECRET ?? process.env.JWT_AUTH_TOKEN_SECRET ?? 'refresh_token' + +const secureCookie = process.env.APP_URL?.startsWith('https') ? true : false +const jwtOptions = { + secretOrKey: jwtAuthTokenSecret, + audience: jwtAudience, + issuer: jwtIssuer +} + +const _initializePassportMiddleware = async (app: express.Application) => { + // Configure session middleware + let options: any = { + secret: process.env.EXPRESS_SESSION_SECRET || 'flowise', + resave: false, + saveUninitialized: false, + cookie: { + secure: secureCookie, + httpOnly: true, + sameSite: 'lax' // Add sameSite attribute + } + } + + // if the auth tokens are not to be expired on restart, then configure the session store + if (!expireAuthTokensOnRestart) { + // configure session store based on the mode + if (process.env.MODE === 'queue') { + const redisStore = initializeRedisClientAndStore() + options.store = redisStore as RedisStore + } else { + // for the database store, choose store basis the DB configuration from .env + const dbSessionStore = initializeDBClientAndStore() + if (dbSessionStore) { + options.store = dbSessionStore + } + } + } + + app.use(session(options)) + app.use(passport.initialize()) + app.use(passport.session()) + + passport.serializeUser((user: any, done) => { + done(null, user) + }) + + passport.deserializeUser((user: any, done) => { + done(null, user) + }) +} + +export const initializeJwtCookieMiddleware = async (app: express.Application, identityManager: IdentityManager) => { + await _initializePassportMiddleware(app) + + const strategy = getAuthStrategy(jwtOptions) + passport.use(strategy) + passport.use( + 'login', + new localStrategy( + { + usernameField: 'email', + passwordField: 'password', + session: true + }, + async (email: string, password: string, done: VerifiedCallback) => { + let queryRunner + try { + queryRunner = getRunningExpressApp().AppDataSource.createQueryRunner() + await queryRunner.connect() + const accountService = new AccountService() + const body: any = { + user: { + email: email, + credential: password + } + } + const response = await accountService.login(body) + const workspaceUser: WorkspaceUser = + Array.isArray(response.workspaceDetails) && response.workspaceDetails.length > 0 + ? 
response.workspaceDetails[0] + : (response.workspaceDetails as WorkspaceUser) + const workspaceUserService = new WorkspaceUserService() + workspaceUser.status = WorkspaceUserStatus.ACTIVE + workspaceUser.lastLogin = new Date().toISOString() + workspaceUser.updatedBy = workspaceUser.userId + const organizationUserService = new OrganizationUserService() + const { organizationUser } = await organizationUserService.readOrganizationUserByWorkspaceIdUserId( + workspaceUser.workspaceId, + workspaceUser.userId, + queryRunner + ) + if (!organizationUser) + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationUserErrorMessage.ORGANIZATION_USER_NOT_FOUND) + organizationUser.status = OrganizationUserStatus.ACTIVE + await workspaceUserService.updateWorkspaceUser(workspaceUser) + await organizationUserService.updateOrganizationUser(organizationUser) + + const workspaceUsers = await workspaceUserService.readWorkspaceUserByUserId(organizationUser.userId, queryRunner) + const assignedWorkspaces: IAssignedWorkspace[] = workspaceUsers.map((workspaceUser) => { + return { + id: workspaceUser.workspace.id, + name: workspaceUser.workspace.name, + role: workspaceUser.role?.name, + organizationId: workspaceUser.workspace.organizationId + } as IAssignedWorkspace + }) + + let roleService = new RoleService() + const ownerRole = await roleService.readGeneralRoleByName(GeneralRole.OWNER, queryRunner) + const role = await roleService.readRoleById(workspaceUser.roleId, queryRunner) + if (!role) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, RoleErrorMessage.ROLE_NOT_FOUND) + + const orgService = new OrganizationService() + const organization = await orgService.readOrganizationById(organizationUser.organizationId, queryRunner) + if (!organization) { + return done('Organization not found') + } + const subscriptionId = organization.subscriptionId as string + const customerId = organization.customerId as string + const features = await identityManager.getFeaturesByPlan(subscriptionId) + const productId = await identityManager.getProductIdFromSubscription(subscriptionId) + + const loggedInUser: LoggedInUser = { + id: workspaceUser.userId, + email: response.user.email, + name: response.user?.name, + roleId: workspaceUser.roleId, + activeOrganizationId: organization.id, + activeOrganizationSubscriptionId: subscriptionId, + activeOrganizationCustomerId: customerId, + activeOrganizationProductId: productId, + isOrganizationAdmin: workspaceUser.roleId === ownerRole.id, + activeWorkspaceId: workspaceUser.workspaceId, + activeWorkspace: workspaceUser.workspace.name, + assignedWorkspaces, + isApiKeyValidated: true, + permissions: [...JSON.parse(role.permissions)], + features + } + return done(null, loggedInUser, { message: 'Logged in Successfully' }) + } catch (error) { + return done(error) + } finally { + if (queryRunner) await queryRunner.release() + } + } + ) + ) + + app.post('/api/v1/auth/resolve', async (req, res) => { + // check for the organization, if empty redirect to the organization setup page for OpenSource and Enterprise Versions + // for Cloud (Horizontal) version, redirect to the signin page + const expressApp = getRunningExpressApp() + const platform = expressApp.identityManager.getPlatformType() + if (platform === Platform.CLOUD) { + return res.status(HttpStatusCode.Ok).json({ redirectUrl: '/signin' }) + } + const orgService = new OrganizationService() + const queryRunner = expressApp.AppDataSource.createQueryRunner() + await queryRunner.connect() + const registeredOrganizationCount = await 
orgService.countOrganizations(queryRunner) + await queryRunner.release() + if (registeredOrganizationCount === 0) { + switch (platform) { + case Platform.ENTERPRISE: + if (!identityManager.isLicenseValid()) { + return res.status(HttpStatusCode.Ok).json({ redirectUrl: '/license-expired' }) + } + return res.status(HttpStatusCode.Ok).json({ redirectUrl: '/organization-setup' }) + default: + return res.status(HttpStatusCode.Ok).json({ redirectUrl: '/organization-setup' }) + } + } + switch (platform) { + case Platform.ENTERPRISE: + if (!identityManager.isLicenseValid()) { + return res.status(HttpStatusCode.Ok).json({ redirectUrl: '/license-expired' }) + } + return res.status(HttpStatusCode.Ok).json({ redirectUrl: '/signin' }) + default: + return res.status(HttpStatusCode.Ok).json({ redirectUrl: '/signin' }) + } + }) + + app.post('/api/v1/auth/refreshToken', async (req, res) => { + const refreshToken = req.cookies.refreshToken + if (!refreshToken) return res.sendStatus(401) + + jwt.verify(refreshToken, jwtRefreshSecret, async (err: any, payload: any) => { + if (err || !payload) return res.status(403).json({ message: ErrorMessage.REFRESH_TOKEN_EXPIRED }) + // @ts-ignore + const loggedInUser = req.user as LoggedInUser + let isSSO = false + let newTokenResponse: any = {} + if (loggedInUser && loggedInUser.ssoRefreshToken) { + try { + newTokenResponse = await identityManager.getRefreshToken(loggedInUser.ssoProvider, loggedInUser.ssoRefreshToken) + if (newTokenResponse.error) { + return res.status(403).json({ message: ErrorMessage.REFRESH_TOKEN_EXPIRED }) + } + isSSO = true + } catch (error) { + return res.status(403).json({ message: ErrorMessage.REFRESH_TOKEN_EXPIRED }) + } + } + const meta = decryptToken(payload.meta) + if (!meta) { + return res.status(403).json({ message: ErrorMessage.REFRESH_TOKEN_EXPIRED }) + } + if (isSSO) { + loggedInUser.ssoToken = newTokenResponse.access_token + if (newTokenResponse.refresh_token) { + loggedInUser.ssoRefreshToken = newTokenResponse.refresh_token + } + return setTokenOrCookies(res, loggedInUser, false, req, false, true) + } else { + return setTokenOrCookies(res, loggedInUser, false, req) + } + }) + }) + + app.post('/api/v1/auth/login', (req, res, next?) => { + passport.authenticate('login', async (err: any, user: LoggedInUser) => { + try { + if (err || !user) { + return next ? next(err) : res.status(401).json(err) + } + if (identityManager.isEnterprise() && !identityManager.isLicenseValid()) { + return res.status(401).json({ redirectUrl: '/license-expired' }) + } + req.login(user, { session: true }, async (error) => { + if (error) { + return next ? next(error) : res.status(401).json(error) + } + return setTokenOrCookies(res, user, true, req) + }) + } catch (error: any) { + return next ? next(error) : res.status(401).json(error) + } + })(req, res, next) + }) +} + +export const setTokenOrCookies = ( + res: Response, + user: any, + regenerateRefreshToken: boolean, + req?: Request, + redirect?: boolean, + isSSO?: boolean +) => { + const token = generateJwtAuthToken(user) + let refreshToken: string = '' + if (regenerateRefreshToken) { + refreshToken = generateJwtRefreshToken(user) + } else { + refreshToken = req?.cookies?.refreshToken + } + const returnUser = generateSafeCopy(user) + returnUser.isSSO = !isSSO ? false : isSSO + + if (redirect) { + // Send user data as part of the redirect URL (using query parameters) + const dashboardUrl = `/sso-success?user=${encodeURIComponent(JSON.stringify(returnUser))}` + // Return the token as a cookie in our response. 
+ let resWithCookies = res + .cookie('token', token, { + httpOnly: true, + secure: secureCookie, + sameSite: 'lax' + }) + .cookie('refreshToken', refreshToken, { + httpOnly: true, + secure: secureCookie, + sameSite: 'lax' + }) + resWithCookies.redirect(dashboardUrl) + } else { + // Return the token as a cookie in our response. + res.cookie('token', token, { + httpOnly: true, + secure: secureCookie, + sameSite: 'lax' + }) + .cookie('refreshToken', refreshToken, { + httpOnly: true, + secure: secureCookie, + sameSite: 'lax' + }) + .type('json') + .send({ ...returnUser }) + } +} + +export const generateJwtAuthToken = (user: any) => { + let expiryInMinutes = -1 + if (user?.ssoToken) { + const jwtHeader = jwt.decode(user.ssoToken, { complete: true }) + if (jwtHeader) { + const utcSeconds = (jwtHeader.payload as any).exp + let d = new Date(0) // The 0 there is the key, which sets the date to the epoch + d.setUTCSeconds(utcSeconds) + // get the minutes difference from current time + expiryInMinutes = Math.abs(d.getTime() - new Date().getTime()) / 60000 + } + } + if (expiryInMinutes === -1) { + expiryInMinutes = process.env.JWT_TOKEN_EXPIRY_IN_MINUTES ? parseInt(process.env.JWT_TOKEN_EXPIRY_IN_MINUTES) : 60 + } + return _generateJwtToken(user, expiryInMinutes, jwtAuthTokenSecret) +} + +export const generateJwtRefreshToken = (user: any) => { + let expiryInMinutes = -1 + if (user.ssoRefreshToken) { + const jwtHeader = jwt.decode(user.ssoRefreshToken, { complete: false }) + if (jwtHeader && typeof jwtHeader !== 'string') { + const utcSeconds = (jwtHeader as JwtPayload).exp + if (utcSeconds) { + let d = new Date(0) // The 0 there is the key, which sets the date to the epoch + d.setUTCSeconds(utcSeconds) + // get the minutes difference from current time + expiryInMinutes = Math.abs(d.getTime() - new Date().getTime()) / 60000 + } + } + } + if (expiryInMinutes === -1) { + expiryInMinutes = process.env.JWT_REFRESH_TOKEN_EXPIRY_IN_MINUTES + ? parseInt(process.env.JWT_REFRESH_TOKEN_EXPIRY_IN_MINUTES) + : 129600 // 90 days + } + return _generateJwtToken(user, expiryInMinutes, jwtRefreshSecret) +} + +const _generateJwtToken = (user: Partial<LoggedInUser>, expiryInMinutes: number, secret: string) => { + const encryptedUserInfo = encryptToken(user?.id + ':' + user?.activeWorkspaceId) + return sign({ id: user?.id, username: user?.name, meta: encryptedUserInfo }, secret!, { + expiresIn: expiryInMinutes + 'm', // Expiry in minutes + notBefore: '0', // Cannot use before now, can be configured to be deferred.
+ algorithm: 'HS256', // HMAC using SHA-256 hash algorithm + audience: jwtAudience, // The audience of the token + issuer: jwtIssuer // The issuer of the token + }) +} + +export const verifyToken = (req: Request, res: Response, next: NextFunction) => { + passport.authenticate('jwt', { session: true }, (err: any, user: LoggedInUser, info: object) => { + if (err) { + return next(err) + } + + // @ts-ignore + if (info && info.name === 'TokenExpiredError') { + if (req.cookies && req.cookies.refreshToken) { + return res.status(401).json({ message: ErrorMessage.TOKEN_EXPIRED, retry: true }) + } + return res.status(401).json({ message: ErrorMessage.INVALID_MISSING_TOKEN }) + } + + if (!user) { + return res.status(401).json({ message: ErrorMessage.INVALID_MISSING_TOKEN }) + } + + const identityManager = getRunningExpressApp().identityManager + if (identityManager.isEnterprise() && !identityManager.isLicenseValid()) { + return res.status(401).json({ redirectUrl: '/license-expired' }) + } + + req.user = user + next() + })(req, res, next) +} diff --git a/packages/server/src/enterprise/middleware/prometheus/index.ts b/packages/server/src/enterprise/middleware/prometheus/index.ts new file mode 100644 index 00000000000..d0334825acf --- /dev/null +++ b/packages/server/src/enterprise/middleware/prometheus/index.ts @@ -0,0 +1,66 @@ +import express from 'express' +import promClient, { Counter } from 'prom-client' + +export const initializePrometheus = (app: express.Application) => { + const register = new promClient.Registry() + register.setDefaultLabels({ + app: 'FlowiseAI' + }) + + const predictionsTotal = new promClient.Counter({ + name: 'predictions_total', + help: 'Total number of predictions', + labelNames: ['status'] + }) + + const requestCounter = new Counter({ + name: 'http_requests_total', + help: 'Total number of HTTP requests', + labelNames: ['method', 'path', 'status'] + }) + + app.use('/api/v1/prediction', async (req, res, next) => { + res.on('finish', async () => { + requestCounter.labels(req?.method, req?.path, res.statusCode.toString()).inc() + predictionsTotal.labels('success').inc() + }) + next() + }) + + // enable default metrics like CPU usage, memory usage, etc.
+ promClient.collectDefaultMetrics({ register }) + // Add our custom metric to the registry + register.registerMetric(requestCounter) + register.registerMetric(predictionsTotal) + + // Add Prometheus middleware to the app + app.use('/api/v1/metrics', async (req, res) => { + res.set('Content-Type', register.contentType) + const currentMetrics = await register.metrics() + res.send(currentMetrics) + }) + + const httpRequestDurationMicroseconds = new promClient.Histogram({ + name: 'http_request_duration_ms', + help: 'Duration of HTTP requests in ms', + labelNames: ['method', 'route', 'code'], + buckets: [1, 5, 15, 50, 100, 200, 300, 400, 500] // buckets for response time from 0.1ms to 500ms + }) + register.registerMetric(httpRequestDurationMicroseconds) + + // Runs before each requests + app.use((req, res, next) => { + res.locals.startEpoch = Date.now() + next() + }) + + // Runs after each requests + app.use((req, res, next) => { + res.on('finish', async () => { + requestCounter.inc() + const responseTimeInMs = Date.now() - res.locals.startEpoch + httpRequestDurationMicroseconds.labels(req.method, req?.route?.path, res.statusCode.toString()).observe(responseTimeInMs) + }) + next() + }) +} diff --git a/packages/server/src/enterprise/rbac/PermissionCheck.ts b/packages/server/src/enterprise/rbac/PermissionCheck.ts new file mode 100644 index 00000000000..583d4ad604c --- /dev/null +++ b/packages/server/src/enterprise/rbac/PermissionCheck.ts @@ -0,0 +1,46 @@ +import { NextFunction, Request, Response } from 'express' +import { ErrorMessage } from '../Interface.Enterprise' + +// Check if the user has the required permission for a route +export const checkPermission = (permission: string) => { + return (req: Request, res: Response, next: NextFunction) => { + const user = req.user + // if the user is not logged in, return forbidden + if (user) { + if (user.isApiKeyValidated || user.isOrganizationAdmin) { + return next() + } + const permissions = user.permissions + if (permissions && permissions.includes(permission)) { + return next() + } + } + // else throw 403 forbidden error + return res.status(403).json({ message: ErrorMessage.FORBIDDEN }) + } +} + +// checks for any permission, input is the permissions separated by comma +export const checkAnyPermission = (permissionsString: string) => { + return (req: Request, res: Response, next: NextFunction) => { + const user = req.user + // if the user is not logged in, return forbidden + if (user) { + if (user.isApiKeyValidated || user.isOrganizationAdmin) { + return next() + } + const permissions = user.permissions + const permissionIds = permissionsString.split(',') + if (permissions && permissions.length) { + // split permissions and check if any of the permissions are present in the user's permissions + for (let i = 0; i < permissionIds.length; i++) { + if (permissions.includes(permissionIds[i])) { + return next() + } + } + } + } + // else throw 403 forbidden error + return res.status(403).json({ message: ErrorMessage.FORBIDDEN }) + } +} diff --git a/packages/server/src/enterprise/rbac/Permissions.ts b/packages/server/src/enterprise/rbac/Permissions.ts new file mode 100644 index 00000000000..e44f541a820 --- /dev/null +++ b/packages/server/src/enterprise/rbac/Permissions.ts @@ -0,0 +1,179 @@ +export class Permissions { + private categories: PermissionCategory[] = [] + constructor() { + // const auditCategory = new PermissionCategory('audit') + // auditCategory.addPermission(new Permission('auditLogs:view', 'View Audit Logs')) + // 
this.categories.push(auditCategory) + + const chatflowsCategory = new PermissionCategory('chatflows') + chatflowsCategory.addPermission(new Permission('chatflows:view', 'View')) + chatflowsCategory.addPermission(new Permission('chatflows:create', 'Create')) + chatflowsCategory.addPermission(new Permission('chatflows:update', 'Update')) + chatflowsCategory.addPermission(new Permission('chatflows:duplicate', 'Duplicate')) + chatflowsCategory.addPermission(new Permission('chatflows:delete', 'Delete')) + chatflowsCategory.addPermission(new Permission('chatflows:export', 'Export')) + chatflowsCategory.addPermission(new Permission('chatflows:import', 'Import')) + chatflowsCategory.addPermission(new Permission('chatflows:config', 'Edit Configuration')) + chatflowsCategory.addPermission(new Permission('chatflows:domains', 'Allowed Domains')) + this.categories.push(chatflowsCategory) + + const agentflowsCategory = new PermissionCategory('agentflows') + agentflowsCategory.addPermission(new Permission('agentflows:view', 'View')) + agentflowsCategory.addPermission(new Permission('agentflows:create', 'Create')) + agentflowsCategory.addPermission(new Permission('agentflows:update', 'Update')) + agentflowsCategory.addPermission(new Permission('agentflows:duplicate', 'Duplicate')) + agentflowsCategory.addPermission(new Permission('agentflows:delete', 'Delete')) + agentflowsCategory.addPermission(new Permission('agentflows:export', 'Export')) + agentflowsCategory.addPermission(new Permission('agentflows:import', 'Import')) + agentflowsCategory.addPermission(new Permission('agentflows:config', 'Edit Configuration')) + agentflowsCategory.addPermission(new Permission('agentflows:domains', 'Allowed Domains')) + this.categories.push(agentflowsCategory) + + const toolsCategory = new PermissionCategory('tools') + toolsCategory.addPermission(new Permission('tools:view', 'View')) + toolsCategory.addPermission(new Permission('tools:create', 'Create')) + toolsCategory.addPermission(new Permission('tools:update', 'Update')) + toolsCategory.addPermission(new Permission('tools:delete', 'Delete')) + toolsCategory.addPermission(new Permission('tools:export', 'Export')) + this.categories.push(toolsCategory) + + const assistantsCategory = new PermissionCategory('assistants') + assistantsCategory.addPermission(new Permission('assistants:view', 'View')) + assistantsCategory.addPermission(new Permission('assistants:create', 'Create')) + assistantsCategory.addPermission(new Permission('assistants:update', 'Update')) + assistantsCategory.addPermission(new Permission('assistants:delete', 'Delete')) + this.categories.push(assistantsCategory) + + const credentialsCategory = new PermissionCategory('credentials') + credentialsCategory.addPermission(new Permission('credentials:view', 'View')) + credentialsCategory.addPermission(new Permission('credentials:create', 'Create')) + credentialsCategory.addPermission(new Permission('credentials:update', 'Update')) + credentialsCategory.addPermission(new Permission('credentials:delete', 'Delete')) + credentialsCategory.addPermission(new Permission('credentials:share', 'Share')) + this.categories.push(credentialsCategory) + + const variablesCategory = new PermissionCategory('variables') + variablesCategory.addPermission(new Permission('variables:view', 'View')) + variablesCategory.addPermission(new Permission('variables:create', 'Create')) + variablesCategory.addPermission(new Permission('variables:update', 'Update')) + variablesCategory.addPermission(new Permission('variables:delete', 
'Delete')) + this.categories.push(variablesCategory) + + const apikeysCategory = new PermissionCategory('apikeys') + apikeysCategory.addPermission(new Permission('apikeys:view', 'View')) + apikeysCategory.addPermission(new Permission('apikeys:create', 'Create')) + apikeysCategory.addPermission(new Permission('apikeys:update', 'Update')) + apikeysCategory.addPermission(new Permission('apikeys:delete', 'Delete')) + apikeysCategory.addPermission(new Permission('apikeys:import', 'Import')) + this.categories.push(apikeysCategory) + + const documentStoresCategory = new PermissionCategory('documentStores') + documentStoresCategory.addPermission(new Permission('documentStores:view', 'View')) + documentStoresCategory.addPermission(new Permission('documentStores:create', 'Create')) + documentStoresCategory.addPermission(new Permission('documentStores:update', 'Update')) + documentStoresCategory.addPermission(new Permission('documentStores:delete', 'Delete Document Store')) + documentStoresCategory.addPermission(new Permission('documentStores:add-loader', 'Add Document Loader')) + documentStoresCategory.addPermission(new Permission('documentStores:delete-loader', 'Delete Document Loader')) + documentStoresCategory.addPermission(new Permission('documentStores:preview-process', 'Preview & Process Document Chunks')) + documentStoresCategory.addPermission(new Permission('documentStores:upsert-config', 'Upsert Config')) + this.categories.push(documentStoresCategory) + + const datasetsCategory = new PermissionCategory('datasets') + datasetsCategory.addPermission(new Permission('datasets:view', 'View')) + datasetsCategory.addPermission(new Permission('datasets:create', 'Create')) + datasetsCategory.addPermission(new Permission('datasets:update', 'Update')) + datasetsCategory.addPermission(new Permission('datasets:delete', 'Delete')) + this.categories.push(datasetsCategory) + + const executionsCategory = new PermissionCategory('executions') + executionsCategory.addPermission(new Permission('executions:view', 'View')) + executionsCategory.addPermission(new Permission('executions:delete', 'Delete')) + this.categories.push(executionsCategory) + + const evaluatorsCategory = new PermissionCategory('evaluators') + evaluatorsCategory.addPermission(new Permission('evaluators:view', 'View')) + evaluatorsCategory.addPermission(new Permission('evaluators:create', 'Create')) + evaluatorsCategory.addPermission(new Permission('evaluators:update', 'Update')) + evaluatorsCategory.addPermission(new Permission('evaluators:delete', 'Delete')) + this.categories.push(evaluatorsCategory) + + const evaluationsCategory = new PermissionCategory('evaluations') + evaluationsCategory.addPermission(new Permission('evaluations:view', 'View')) + evaluationsCategory.addPermission(new Permission('evaluations:create', 'Create')) + evaluationsCategory.addPermission(new Permission('evaluations:update', 'Update')) + evaluationsCategory.addPermission(new Permission('evaluations:delete', 'Delete')) + evaluationsCategory.addPermission(new Permission('evaluations:run', 'Run Again')) + this.categories.push(evaluationsCategory) + + const templatesCategory = new PermissionCategory('templates') + templatesCategory.addPermission(new Permission('templates:marketplace', 'View Marketplace Templates')) + templatesCategory.addPermission(new Permission('templates:custom', 'View Custom Templates')) + templatesCategory.addPermission(new Permission('templates:custom-delete', 'Delete Custom Template')) + templatesCategory.addPermission(new 
Permission('templates:toolexport', 'Export Tool as Template')) + templatesCategory.addPermission(new Permission('templates:flowexport', 'Export Flow as Template')) + templatesCategory.addPermission(new Permission('templates:custom-share', 'Share Custom Templates')) + this.categories.push(templatesCategory) + + const workspaceCategory = new PermissionCategory('workspace') + workspaceCategory.addPermission(new Permission('workspace:view', 'View')) + workspaceCategory.addPermission(new Permission('workspace:create', 'Create')) + workspaceCategory.addPermission(new Permission('workspace:update', 'Update')) + workspaceCategory.addPermission(new Permission('workspace:add-user', 'Add User')) + workspaceCategory.addPermission(new Permission('workspace:unlink-user', 'Remove User')) + workspaceCategory.addPermission(new Permission('workspace:delete', 'Delete')) + workspaceCategory.addPermission(new Permission('workspace:export', 'Export Data within Workspace')) + workspaceCategory.addPermission(new Permission('workspace:import', 'Import Data within Workspace')) + this.categories.push(workspaceCategory) + + const adminCategory = new PermissionCategory('admin') + adminCategory.addPermission(new Permission('users:manage', 'Manage Users')) + adminCategory.addPermission(new Permission('roles:manage', 'Manage Roles')) + adminCategory.addPermission(new Permission('sso:manage', 'Manage SSO')) + this.categories.push(adminCategory) + + const logsCategory = new PermissionCategory('logs') + logsCategory.addPermission(new Permission('logs:view', 'View Logs', true)) + this.categories.push(logsCategory) + + const loginActivityCategory = new PermissionCategory('loginActivity') + loginActivityCategory.addPermission(new Permission('loginActivity:view', 'View Login Activity', true)) + loginActivityCategory.addPermission(new Permission('loginActivity:delete', 'Delete Login Activity', true)) + this.categories.push(loginActivityCategory) + } + + public toJSON(): { [key: string]: { key: string; value: string }[] } { + return this.categories.reduce((acc, category) => { + return { + ...acc, + ...category.toJSON() + } + }, {}) + } +} + +export class PermissionCategory { + public permissions: any[] = [] + + constructor(public category: string) {} + + addPermission(permission: Permission) { + this.permissions.push(permission) + } + public toJSON() { + return { + [this.category]: [...this.permissions.map((permission) => permission.toJSON())] + } + } +} + +export class Permission { + constructor(public name: string, public description: string, public isEnterprise: boolean = false) {} + + public toJSON() { + return { + key: this.name, + value: this.description, + isEnterprise: this.isEnterprise + } + } +} diff --git a/packages/server/src/enterprise/routes/account.route.ts b/packages/server/src/enterprise/routes/account.route.ts new file mode 100644 index 00000000000..ce4eb9175b0 --- /dev/null +++ b/packages/server/src/enterprise/routes/account.route.ts @@ -0,0 +1,39 @@ +import express from 'express' +import { AccountController } from '../controllers/account.controller' +import { IdentityManager } from '../../IdentityManager' +import { checkAnyPermission } from '../rbac/PermissionCheck' + +const router = express.Router() +const accountController = new AccountController() + +router.post('/register', accountController.register) + +// feature flag to workspace since only user who has workspaces can invite +router.post( + '/invite', + IdentityManager.checkFeatureByPlan('feat:workspaces'), + 
checkAnyPermission('workspace:add-user,users:manage'), + accountController.invite +) + +router.post('/login', accountController.login) + +router.post('/logout', accountController.logout) + +router.post('/verify', accountController.verify) + +router.post('/resend-verification', accountController.resendVerificationEmail) + +router.post('/forgot-password', accountController.forgotPassword) + +router.post('/reset-password', accountController.resetPassword) + +router.post('/cancel-subscription', accountController.cancelPreviousCloudSubscrption) + +router.get('/billing', accountController.createStripeCustomerPortalSession) + +router.get('/basic-auth', accountController.getBasicAuth) + +router.post('/basic-auth', accountController.checkBasicAuth) + +export default router diff --git a/packages/server/src/enterprise/routes/audit/index.ts b/packages/server/src/enterprise/routes/audit/index.ts new file mode 100644 index 00000000000..5ddd7d54772 --- /dev/null +++ b/packages/server/src/enterprise/routes/audit/index.ts @@ -0,0 +1,9 @@ +import express from 'express' +import auditController from '../../controllers/audit' +import { checkPermission } from '../../rbac/PermissionCheck' +const router = express.Router() + +router.post(['/', '/login-activity'], checkPermission('loginActivity:view'), auditController.fetchLoginActivity) +router.post(['/', '/login-activity/delete'], checkPermission('loginActivity:delete'), auditController.deleteLoginActivity) + +export default router diff --git a/packages/server/src/enterprise/routes/auth/index.ts b/packages/server/src/enterprise/routes/auth/index.ts new file mode 100644 index 00000000000..5845f3a3eb8 --- /dev/null +++ b/packages/server/src/enterprise/routes/auth/index.ts @@ -0,0 +1,8 @@ +import express from 'express' +import authController from '../../controllers/auth' +const router = express.Router() + +// RBAC +router.get(['/', '/permissions'], authController.getAllPermissions) + +export default router diff --git a/packages/server/src/enterprise/routes/login-method.route.ts b/packages/server/src/enterprise/routes/login-method.route.ts new file mode 100644 index 00000000000..f1c3912e261 --- /dev/null +++ b/packages/server/src/enterprise/routes/login-method.route.ts @@ -0,0 +1,18 @@ +import express from 'express' +import { LoginMethodController } from '../controllers/login-method.controller' +import { checkPermission } from '../rbac/PermissionCheck' + +const router = express.Router() +const loginMethodController = new LoginMethodController() + +router.get('/', loginMethodController.read) + +router.get('/default', loginMethodController.defaultMethods) + +router.post('/', checkPermission('sso:manage'), loginMethodController.create) + +router.put('/', checkPermission('sso:manage'), loginMethodController.update) + +router.post('/test', checkPermission('sso:manage'), loginMethodController.testConfig) + +export default router diff --git a/packages/server/src/enterprise/routes/organization-user.route.ts b/packages/server/src/enterprise/routes/organization-user.route.ts new file mode 100644 index 00000000000..99241756e9d --- /dev/null +++ b/packages/server/src/enterprise/routes/organization-user.route.ts @@ -0,0 +1,17 @@ +import express from 'express' +import { OrganizationUserController } from '../controllers/organization-user.controller' +import { checkPermission } from '../rbac/PermissionCheck' +import { IdentityManager } from '../../IdentityManager' + +const router = express.Router() +const organizationUserController = new OrganizationUserController() + 
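
The route files in this diff compose authorization from two middleware factories: IdentityManager.checkFeatureByPlan gates a route behind a subscription feature flag, while checkPermission and checkAnyPermission from rbac/PermissionCheck.ts gate it behind the logged-in user's RBAC permissions. A minimal sketch of that composition follows; the readHandler and writeHandler are hypothetical placeholders, everything else mirrors the signatures shown above.

import express from 'express'
import { IdentityManager } from '../../IdentityManager'
import { checkAnyPermission, checkPermission } from '../rbac/PermissionCheck'

const router = express.Router()

// Hypothetical handlers, for illustration only
const readHandler = (_req: express.Request, res: express.Response) => res.json({ ok: true })
const writeHandler = (_req: express.Request, res: express.Response) => res.json({ ok: true })

// Reads stay open so users on lower plans can still list what they were invited to
router.get('/', readHandler)

// Writes require the feature flag AND a single permission...
router.post('/', IdentityManager.checkFeatureByPlan('feat:users'), checkPermission('users:manage'), writeHandler)

// ...or any one of a comma-separated list of permissions
router.put('/', IdentityManager.checkFeatureByPlan('feat:workspaces'), checkAnyPermission('workspace:add-user,users:manage'), writeHandler)

export default router

In the diff itself the same pattern appears with the real controllers; checkAnyPermission is used where either a workspace admin or an organization admin should pass, as in the invite route above.
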
+router.get('/', organizationUserController.read) + +router.post('/', IdentityManager.checkFeatureByPlan('feat:users'), checkPermission('users:manage'), organizationUserController.create) + +router.put('/', IdentityManager.checkFeatureByPlan('feat:users'), checkPermission('users:manage'), organizationUserController.update) + +router.delete('/', IdentityManager.checkFeatureByPlan('feat:users'), checkPermission('users:manage'), organizationUserController.delete) + +export default router diff --git a/packages/server/src/enterprise/routes/organization.route.ts b/packages/server/src/enterprise/routes/organization.route.ts new file mode 100644 index 00000000000..52dc17c2646 --- /dev/null +++ b/packages/server/src/enterprise/routes/organization.route.ts @@ -0,0 +1,27 @@ +import express from 'express' +import { OrganizationController } from '../controllers/organization.controller' + +const router = express.Router() +const organizationController = new OrganizationController() + +router.get('/', organizationController.read) + +router.post('/', organizationController.create) + +router.put('/', organizationController.update) + +router.get('/additional-seats-quantity', organizationController.getAdditionalSeatsQuantity) + +router.get('/customer-default-source', organizationController.getCustomerWithDefaultSource) + +router.get('/additional-seats-proration', organizationController.getAdditionalSeatsProration) + +router.post('/update-additional-seats', organizationController.updateAdditionalSeats) + +router.get('/plan-proration', organizationController.getPlanProration) + +router.post('/update-subscription-plan', organizationController.updateSubscriptionPlan) + +router.get('/get-current-usage', organizationController.getCurrentUsage) + +export default router diff --git a/packages/server/src/enterprise/routes/role.route.ts b/packages/server/src/enterprise/routes/role.route.ts new file mode 100644 index 00000000000..19225ba8c14 --- /dev/null +++ b/packages/server/src/enterprise/routes/role.route.ts @@ -0,0 +1,16 @@ +import express from 'express' +import { RoleController } from '../controllers/role.controller' +import { checkPermission } from '../rbac/PermissionCheck' + +const router = express.Router() +const roleController = new RoleController() + +router.get('/', roleController.read) + +router.post('/', checkPermission('roles:manage'), roleController.create) + +router.put('/', checkPermission('roles:manage'), roleController.update) + +router.delete('/', checkPermission('roles:manage'), roleController.delete) + +export default router diff --git a/packages/server/src/enterprise/routes/user.route.ts b/packages/server/src/enterprise/routes/user.route.ts new file mode 100644 index 00000000000..dcfc50487e7 --- /dev/null +++ b/packages/server/src/enterprise/routes/user.route.ts @@ -0,0 +1,14 @@ +import express from 'express' +import { UserController } from '../controllers/user.controller' + +const router = express.Router() +const userController = new UserController() + +router.get('/', userController.read) +router.get('/test', userController.test) + +router.post('/', userController.create) + +router.put('/', userController.update) + +export default router diff --git a/packages/server/src/enterprise/routes/workspace-user.route.ts b/packages/server/src/enterprise/routes/workspace-user.route.ts new file mode 100644 index 00000000000..12ec8244723 --- /dev/null +++ b/packages/server/src/enterprise/routes/workspace-user.route.ts @@ -0,0 +1,33 @@ +import express from 'express' +import { WorkspaceUserController } from 
'../controllers/workspace-user.controller' +import { IdentityManager } from '../../IdentityManager' +import { checkPermission } from '../rbac/PermissionCheck' + +const router = express.Router() +const workspaceUserController = new WorkspaceUserController() + +// no feature flag because user with lower plan can read invited workspaces with higher plan +router.get('/', workspaceUserController.read) + +router.post( + '/', + IdentityManager.checkFeatureByPlan('feat:workspaces'), + checkPermission('workspace:add-user'), + workspaceUserController.create +) + +router.put( + '/', + IdentityManager.checkFeatureByPlan('feat:workspaces'), + checkPermission('workspace:add-user'), + workspaceUserController.update +) + +router.delete( + '/', + IdentityManager.checkFeatureByPlan('feat:workspaces'), + checkPermission('workspace:unlink-user'), + workspaceUserController.delete +) + +export default router diff --git a/packages/server/src/enterprise/routes/workspace.route.ts b/packages/server/src/enterprise/routes/workspace.route.ts new file mode 100644 index 00000000000..2e27aa91b6f --- /dev/null +++ b/packages/server/src/enterprise/routes/workspace.route.ts @@ -0,0 +1,38 @@ +import express from 'express' +import { WorkspaceController } from '../controllers/workspace.controller' +import { IdentityManager } from '../../IdentityManager' +import { checkPermission } from '../rbac/PermissionCheck' + +const router = express.Router() +const workspaceController = new WorkspaceController() + +router.get('/', IdentityManager.checkFeatureByPlan('feat:workspaces'), checkPermission('workspace:view'), workspaceController.read) + +router.post('/', IdentityManager.checkFeatureByPlan('feat:workspaces'), checkPermission('workspace:create'), workspaceController.create) + +// no feature flag because user with lower plan can switch to invited workspaces with higher plan +router.post('/switch', workspaceController.switchWorkspace) + +router.put('/', IdentityManager.checkFeatureByPlan('feat:workspaces'), checkPermission('workspace:update'), workspaceController.update) + +router.delete( + ['/', '/:id'], + IdentityManager.checkFeatureByPlan('feat:workspaces'), + checkPermission('workspace:delete'), + workspaceController.delete +) + +router.get( + ['/shared', '/shared/:id'], + IdentityManager.checkFeatureByPlan('feat:workspaces'), + checkPermission('workspace:create'), + workspaceController.getSharedWorkspacesForItem +) +router.post( + ['/shared', '/shared/:id'], + IdentityManager.checkFeatureByPlan('feat:workspaces'), + checkPermission('workspace:create'), + workspaceController.setSharedWorkspacesForItem +) + +export default router diff --git a/packages/server/src/enterprise/services/account.service.ts b/packages/server/src/enterprise/services/account.service.ts new file mode 100644 index 00000000000..268326542ef --- /dev/null +++ b/packages/server/src/enterprise/services/account.service.ts @@ -0,0 +1,592 @@ +import bcrypt from 'bcryptjs' +import { StatusCodes } from 'http-status-codes' +import moment from 'moment' +import { DataSource, QueryRunner } from 'typeorm' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { IdentityManager } from '../../IdentityManager' +import { Platform, UserPlan } from '../../Interface' +import { GeneralErrorMessage } from '../../utils/constants' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { checkUsageLimit } from '../../utils/quotaUsage' +import { OrganizationUser, OrganizationUserStatus } from 
'../database/entities/organization-user.entity' +import { Organization, OrganizationName } from '../database/entities/organization.entity' +import { GeneralRole, Role } from '../database/entities/role.entity' +import { User, UserStatus } from '../database/entities/user.entity' +import { WorkspaceUser, WorkspaceUserStatus } from '../database/entities/workspace-user.entity' +import { Workspace, WorkspaceName } from '../database/entities/workspace.entity' +import { LoggedInUser, LoginActivityCode } from '../Interface.Enterprise' +import { compareHash } from '../utils/encryption.util' +import { sendPasswordResetEmail, sendVerificationEmailForCloud, sendWorkspaceAdd, sendWorkspaceInvite } from '../utils/sendEmail' +import { generateTempToken } from '../utils/tempTokenUtils' +import auditService from './audit' +import { OrganizationUserErrorMessage, OrganizationUserService } from './organization-user.service' +import { OrganizationErrorMessage, OrganizationService } from './organization.service' +import { RoleErrorMessage, RoleService } from './role.service' +import { UserErrorMessage, UserService } from './user.service' +import { WorkspaceUserErrorMessage, WorkspaceUserService } from './workspace-user.service' +import { WorkspaceErrorMessage, WorkspaceService } from './workspace.service' + +type AccountDTO = { + user: Partial<User> + organization: Partial<Organization> + organizationUser: Partial<OrganizationUser> + workspace: Partial<Workspace> + workspaceUser: Partial<WorkspaceUser> + role: Partial<Role> +} + +export class AccountService { + private dataSource: DataSource + private userService: UserService + private organizationservice: OrganizationService + private workspaceService: WorkspaceService + private roleService: RoleService + private organizationUserService: OrganizationUserService + private workspaceUserService: WorkspaceUserService + private identityManager: IdentityManager + + constructor() { + const appServer = getRunningExpressApp() + this.dataSource = appServer.AppDataSource + this.userService = new UserService() + this.organizationservice = new OrganizationService() + this.workspaceService = new WorkspaceService() + this.roleService = new RoleService() + this.organizationUserService = new OrganizationUserService() + this.workspaceUserService = new WorkspaceUserService() + this.identityManager = appServer.identityManager + } + + private initializeAccountDTO(data: AccountDTO) { + data.organization = data.organization || {} + data.organizationUser = data.organizationUser || {} + data.workspace = data.workspace || {} + data.workspaceUser = data.workspaceUser || {} + data.role = data.role || {} + + return data + } + + public async resendVerificationEmail({ email }: { email: string }) { + const queryRunner = this.dataSource.createQueryRunner() + await queryRunner.connect() + try { + await queryRunner.startTransaction() + + const user = await this.userService.readUserByEmail(email, queryRunner) + if (!user) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + if (user && user.status === UserStatus.ACTIVE) + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_EMAIL_ALREADY_EXISTS) + + if (!user.email) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, UserErrorMessage.INVALID_USER_EMAIL) + + const updateUserData: Partial<User> = {} + updateUserData.tempToken = generateTempToken() + const tokenExpiry = new Date() + const expiryInHours = process.env.INVITE_TOKEN_EXPIRY_IN_HOURS ?
parseInt(process.env.INVITE_TOKEN_EXPIRY_IN_HOURS) : 24 + tokenExpiry.setHours(tokenExpiry.getHours() + expiryInHours) + updateUserData.tokenExpiry = tokenExpiry + + // Update user with new token and expiry + const updatedUser = queryRunner.manager.merge(User, user, updateUserData) + await queryRunner.manager.save(User, updatedUser) + + // resend invite + const verificationLink = `${process.env.APP_URL}/verify?token=${updateUserData.tempToken}` + await sendVerificationEmailForCloud(email, verificationLink) + + await queryRunner.commitTransaction() + } catch (error) { + await queryRunner.rollbackTransaction() + throw error + } finally { + await queryRunner.release() + } + } + + private async createRegisterAccount(data: AccountDTO, queryRunner: QueryRunner) { + data = this.initializeAccountDTO(data) + + const platform = this.identityManager.getPlatformType() + + switch (platform) { + case Platform.OPEN_SOURCE: + data.organization.name = OrganizationName.DEFAULT_ORGANIZATION + data.organizationUser.role = await this.roleService.readGeneralRoleByName(GeneralRole.OWNER, queryRunner) + data.workspace.name = WorkspaceName.DEFAULT_WORKSPACE + data.workspaceUser.role = data.organizationUser.role + data.user.status = UserStatus.ACTIVE + data.user = await this.userService.createNewUser(data.user, queryRunner) + break + case Platform.CLOUD: { + const user = await this.userService.readUserByEmail(data.user.email, queryRunner) + if (user && (user.status === UserStatus.ACTIVE || user.status === UserStatus.UNVERIFIED)) + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_EMAIL_ALREADY_EXISTS) + + if (!data.user.email) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, UserErrorMessage.INVALID_USER_EMAIL) + const { customerId, subscriptionId } = await this.identityManager.createStripeUserAndSubscribe({ + email: data.user.email, + userPlan: UserPlan.FREE, + // @ts-ignore + referral: data.user.referral || '' + }) + data.organization.customerId = customerId + data.organization.subscriptionId = subscriptionId + + // if credential exists then the user is signing up with email/password + // if not then the user is signing up with oauth/sso + if (data.user.credential) { + data.user.status = UserStatus.UNVERIFIED + data.user.tempToken = generateTempToken() + const tokenExpiry = new Date() + const expiryInHours = process.env.INVITE_TOKEN_EXPIRY_IN_HOURS ? 
parseInt(process.env.INVITE_TOKEN_EXPIRY_IN_HOURS) : 24 + tokenExpiry.setHours(tokenExpiry.getHours() + expiryInHours) + data.user.tokenExpiry = tokenExpiry + } else { + data.user.status = UserStatus.ACTIVE + data.user.tempToken = '' + data.user.tokenExpiry = null + } + data.organization.name = OrganizationName.DEFAULT_ORGANIZATION + data.organizationUser.role = await this.roleService.readGeneralRoleByName(GeneralRole.OWNER, queryRunner) + data.workspace.name = WorkspaceName.DEFAULT_WORKSPACE + data.workspaceUser.role = data.organizationUser.role + if (!user) { + data.user = await this.userService.createNewUser(data.user, queryRunner) + } else { + if (data.user.credential) data.user.credential = this.userService.encryptUserCredential(data.user.credential) + data.user.updatedBy = user.id + data.user = queryRunner.manager.merge(User, user, data.user) + } + // send verification email only if user signed up with email/password + if (data.user.credential) { + const verificationLink = `${process.env.APP_URL}/verify?token=${data.user.tempToken}` + await sendVerificationEmailForCloud(data.user.email!, verificationLink) + } + break + } + case Platform.ENTERPRISE: { + if (data.user.tempToken) { + const user = await this.userService.readUserByToken(data.user.tempToken, queryRunner) + if (!user) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + if (user.email !== data.user.email) + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, UserErrorMessage.INVALID_USER_EMAIL) + const name = data.user.name + if (data.user.credential) user.credential = this.userService.encryptUserCredential(data.user.credential) + data.user = user + const organizationUser = await this.organizationUserService.readOrganizationUserByUserId(user.id, queryRunner) + if (!organizationUser) + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationUserErrorMessage.ORGANIZATION_USER_NOT_FOUND) + const assignedOrganization = await this.organizationservice.readOrganizationById( + organizationUser[0].organizationId, + queryRunner + ) + if (!assignedOrganization) + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationErrorMessage.ORGANIZATION_NOT_FOUND) + data.organization = assignedOrganization + const tokenExpiry = new Date(user.tokenExpiry!) 
+ const today = new Date() + if (today > tokenExpiry) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, UserErrorMessage.EXPIRED_TEMP_TOKEN) + data.user.tempToken = '' + data.user.tokenExpiry = null + data.user.name = name + data.user.status = UserStatus.ACTIVE + data.organizationUser.status = OrganizationUserStatus.ACTIVE + data.organizationUser.role = await this.roleService.readGeneralRoleByName(GeneralRole.MEMBER, queryRunner) + data.workspace.name = WorkspaceName.DEFAULT_PERSONAL_WORKSPACE + data.workspaceUser.role = await this.roleService.readGeneralRoleByName(GeneralRole.PERSONAL_WORKSPACE, queryRunner) + } else { + data.organizationUser.role = await this.roleService.readGeneralRoleByName(GeneralRole.OWNER, queryRunner) + data.workspace.name = WorkspaceName.DEFAULT_WORKSPACE + data.workspaceUser.role = data.organizationUser.role + data.user.status = UserStatus.ACTIVE + data.user = await this.userService.createNewUser(data.user, queryRunner) + } + break + } + default: + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, GeneralErrorMessage.UNHANDLED_EDGE_CASE) + } + + if (!data.organization.id) { + data.organization.createdBy = data.user.createdBy + data.organization = this.organizationservice.createNewOrganization(data.organization, queryRunner, true) + } + data.organizationUser.organizationId = data.organization.id + data.organizationUser.userId = data.user.id + data.organizationUser.createdBy = data.user.createdBy + data.organizationUser = this.organizationUserService.createNewOrganizationUser(data.organizationUser, queryRunner) + data.workspace.organizationId = data.organization.id + data.workspace.createdBy = data.user.createdBy + data.workspace = this.workspaceService.createNewWorkspace(data.workspace, queryRunner, true) + data.workspaceUser.workspaceId = data.workspace.id + data.workspaceUser.userId = data.user.id + data.workspaceUser.createdBy = data.user.createdBy + data.workspaceUser.status = WorkspaceUserStatus.ACTIVE + data.workspaceUser = this.workspaceUserService.createNewWorkspaceUser(data.workspaceUser, queryRunner) + + return data + } + + private async saveRegisterAccount(data: AccountDTO) { + const queryRunner = this.dataSource.createQueryRunner() + await queryRunner.connect() + const platform = this.identityManager.getPlatformType() + const ownerRole = await this.roleService.readGeneralRoleByName(GeneralRole.OWNER, queryRunner) + + try { + data = await this.createRegisterAccount(data, queryRunner) + + await queryRunner.startTransaction() + data.user = await this.userService.saveUser(data.user, queryRunner) + data.organization = await this.organizationservice.saveOrganization(data.organization, queryRunner) + data.organizationUser = await this.organizationUserService.saveOrganizationUser(data.organizationUser, queryRunner) + data.workspace = await this.workspaceService.saveWorkspace(data.workspace, queryRunner) + data.workspaceUser = await this.workspaceUserService.saveWorkspaceUser(data.workspaceUser, queryRunner) + if ( + data.workspace.id && + (platform === Platform.OPEN_SOURCE || platform === Platform.ENTERPRISE) && + ownerRole.id === data.organizationUser.roleId + ) { + await this.workspaceService.setNullWorkspaceId(queryRunner, data.workspace.id) + } + await queryRunner.commitTransaction() + + delete data.user.credential + delete data.user.tempToken + delete data.user.tokenExpiry + + return data + } catch (error) { + if (queryRunner && queryRunner.isTransactionActive) await queryRunner.rollbackTransaction() + throw error + } finally { + if 
(queryRunner && !queryRunner.isReleased) await queryRunner.release() + } + } + + public async register(data: AccountDTO) { + return await this.saveRegisterAccount(data) + } + + private async saveInviteAccount(data: AccountDTO, currentUser?: Express.User) { + data = this.initializeAccountDTO(data) + const queryRunner = this.dataSource.createQueryRunner() + await queryRunner.connect() + + try { + const workspace = await this.workspaceService.readWorkspaceById(data.workspace.id, queryRunner) + if (!workspace) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, WorkspaceErrorMessage.WORKSPACE_NOT_FOUND) + data.workspace = workspace + + const totalOrgUsers = await this.organizationUserService.readOrgUsersCountByOrgId(data.workspace.organizationId || '') + const subscriptionId = currentUser?.activeOrganizationSubscriptionId || '' + + const role = await this.roleService.readRoleByRoleIdOrganizationId(data.role.id, data.workspace.organizationId, queryRunner) + if (!role) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, RoleErrorMessage.ROLE_NOT_FOUND) + data.role = role + const user = await this.userService.readUserByEmail(data.user.email, queryRunner) + if (!user) { + await checkUsageLimit('users', subscriptionId, getRunningExpressApp().usageCacheManager, totalOrgUsers + 1) + + // generate a temporary token + data.user.tempToken = generateTempToken() + const tokenExpiry = new Date() + // set expiry based on env setting and fallback to 24 hours + const expiryInHours = process.env.INVITE_TOKEN_EXPIRY_IN_HOURS ? parseInt(process.env.INVITE_TOKEN_EXPIRY_IN_HOURS) : 24 + tokenExpiry.setHours(tokenExpiry.getHours() + expiryInHours) + data.user.tokenExpiry = tokenExpiry + data.user.status = UserStatus.INVITED + // send invite + const registerLink = + this.identityManager.getPlatformType() === Platform.ENTERPRISE + ? 
`${process.env.APP_URL}/register?token=${data.user.tempToken}` + : `${process.env.APP_URL}/register` + await sendWorkspaceInvite(data.user.email!, data.workspace.name!, registerLink, this.identityManager.getPlatformType()) + data.user = await this.userService.createNewUser(data.user, queryRunner) + + data.organizationUser.organizationId = data.workspace.organizationId + data.organizationUser.userId = data.user.id + const roleMember = await this.roleService.readGeneralRoleByName(GeneralRole.MEMBER, queryRunner) + data.organizationUser.roleId = roleMember.id + data.organizationUser.createdBy = data.user.createdBy + data.organizationUser.status = OrganizationUserStatus.INVITED + data.organizationUser = await this.organizationUserService.createNewOrganizationUser(data.organizationUser, queryRunner) + + workspace.updatedBy = data.user.createdBy + + data.workspaceUser.workspaceId = data.workspace.id + data.workspaceUser.userId = data.user.id + data.workspaceUser.roleId = data.role.id + data.workspaceUser.createdBy = data.user.createdBy + data.workspaceUser.status = WorkspaceUserStatus.INVITED + data.workspaceUser = await this.workspaceUserService.createNewWorkspaceUser(data.workspaceUser, queryRunner) + + await queryRunner.startTransaction() + data.user = await this.userService.saveUser(data.user, queryRunner) + await this.workspaceService.saveWorkspace(workspace, queryRunner) + data.organizationUser = await this.organizationUserService.saveOrganizationUser(data.organizationUser, queryRunner) + data.workspaceUser = await this.workspaceUserService.saveWorkspaceUser(data.workspaceUser, queryRunner) + data.role = await this.roleService.saveRole(data.role, queryRunner) + await queryRunner.commitTransaction() + delete data.user.credential + delete data.user.tempToken + delete data.user.tokenExpiry + + return data + } + const { organizationUser } = await this.organizationUserService.readOrganizationUserByOrganizationIdUserId( + data.workspace.organizationId, + user.id, + queryRunner + ) + if (!organizationUser) { + await checkUsageLimit('users', subscriptionId, getRunningExpressApp().usageCacheManager, totalOrgUsers + 1) + data.organizationUser.organizationId = data.workspace.organizationId + data.organizationUser.userId = user.id + const roleMember = await this.roleService.readGeneralRoleByName(GeneralRole.MEMBER, queryRunner) + data.organizationUser.roleId = roleMember.id + data.organizationUser.createdBy = data.user.createdBy + data.organizationUser.status = OrganizationUserStatus.INVITED + data.organizationUser = await this.organizationUserService.createNewOrganizationUser(data.organizationUser, queryRunner) + } else { + data.organizationUser = organizationUser + } + + let oldWorkspaceUser + if (data.organizationUser.status === OrganizationUserStatus.INVITED) { + const workspaceUser = await this.workspaceUserService.readWorkspaceUserByOrganizationIdUserId( + data.workspace.organizationId, + user.id, + queryRunner + ) + let registerLink: string + if (this.identityManager.getPlatformType() === Platform.ENTERPRISE) { + data.user = user + data.user.tempToken = generateTempToken() + const tokenExpiry = new Date() + const expiryInHours = process.env.INVITE_TOKEN_EXPIRY_IN_HOURS ? 
parseInt(process.env.INVITE_TOKEN_EXPIRY_IN_HOURS) : 24 + tokenExpiry.setHours(tokenExpiry.getHours() + expiryInHours) + data.user.tokenExpiry = tokenExpiry + await this.userService.saveUser(data.user, queryRunner) + registerLink = `${process.env.APP_URL}/register?token=${data.user.tempToken}` + } else { + registerLink = `${process.env.APP_URL}/register` + } + if (workspaceUser.length === 1) { + oldWorkspaceUser = workspaceUser[0] + if (oldWorkspaceUser.workspace.name === WorkspaceName.DEFAULT_PERSONAL_WORKSPACE) { + await sendWorkspaceInvite( + data.user.email!, + data.workspace.name!, + registerLink, + this.identityManager.getPlatformType() + ) + } else { + await sendWorkspaceInvite( + data.user.email!, + data.workspace.name!, + registerLink, + this.identityManager.getPlatformType(), + 'update' + ) + } + } else { + await sendWorkspaceInvite(data.user.email!, data.workspace.name!, registerLink, this.identityManager.getPlatformType()) + } + } else { + data.organizationUser.updatedBy = data.user.createdBy + + const dashboardLink = `${process.env.APP_URL}` + await sendWorkspaceAdd(data.user.email!, data.workspace.name!, dashboardLink) + } + + workspace.updatedBy = data.user.createdBy + + data.workspaceUser.workspaceId = data.workspace.id + data.workspaceUser.userId = user.id + data.workspaceUser.roleId = data.role.id + data.workspaceUser.createdBy = data.user.createdBy + data.workspaceUser.status = WorkspaceUserStatus.INVITED + data.workspaceUser = await this.workspaceUserService.createNewWorkspaceUser(data.workspaceUser, queryRunner) + + const personalWorkspaceRole = await this.roleService.readGeneralRoleByName(GeneralRole.PERSONAL_WORKSPACE, queryRunner) + if (oldWorkspaceUser && oldWorkspaceUser.roleId !== personalWorkspaceRole.id) { + await this.workspaceUserService.deleteWorkspaceUser(oldWorkspaceUser.workspaceId, user.id) + } + + await queryRunner.startTransaction() + data.organizationUser = await this.organizationUserService.saveOrganizationUser(data.organizationUser, queryRunner) + await this.workspaceService.saveWorkspace(workspace, queryRunner) + data.workspaceUser = await this.workspaceUserService.saveWorkspaceUser(data.workspaceUser, queryRunner) + data.role = await this.roleService.saveRole(data.role, queryRunner) + await queryRunner.commitTransaction() + + return data + } catch (error) { + if (queryRunner && queryRunner.isTransactionActive) await queryRunner.rollbackTransaction() + throw error + } finally { + if (queryRunner && !queryRunner.isReleased) await queryRunner.release() + } + } + + public async invite(data: AccountDTO, user?: Express.User) { + return await this.saveInviteAccount(data, user) + } + + public async login(data: AccountDTO) { + data = this.initializeAccountDTO(data) + const queryRunner = this.dataSource.createQueryRunner() + await queryRunner.connect() + const platform = this.identityManager.getPlatformType() + try { + if (!data.user.credential) { + await auditService.recordLoginActivity(data.user.email || '', LoginActivityCode.INCORRECT_CREDENTIAL, 'Login Failed') + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, UserErrorMessage.INVALID_USER_CREDENTIAL) + } + const user = await this.userService.readUserByEmail(data.user.email, queryRunner) + if (!user) { + await auditService.recordLoginActivity(data.user.email || '', LoginActivityCode.UNKNOWN_USER, 'Login Failed') + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + } + if (!user.credential) { + await auditService.recordLoginActivity(user.email || '', 
LoginActivityCode.INCORRECT_CREDENTIAL, 'Login Failed') + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, UserErrorMessage.INVALID_USER_CREDENTIAL) + } + if (!compareHash(data.user.credential, user.credential)) { + await auditService.recordLoginActivity(user.email || '', LoginActivityCode.INCORRECT_CREDENTIAL, 'Login Failed') + throw new InternalFlowiseError(StatusCodes.UNAUTHORIZED, UserErrorMessage.INCORRECT_USER_EMAIL_OR_CREDENTIALS) + } + if (user.status === UserStatus.UNVERIFIED) { + await auditService.recordLoginActivity(data.user.email || '', LoginActivityCode.REGISTRATION_PENDING, 'Login Failed') + throw new InternalFlowiseError(StatusCodes.UNAUTHORIZED, UserErrorMessage.USER_EMAIL_UNVERIFIED) + } + let wsUserOrUsers = await this.workspaceUserService.readWorkspaceUserByLastLogin(user.id, queryRunner) + if (Array.isArray(wsUserOrUsers)) { + if (wsUserOrUsers.length > 0) { + wsUserOrUsers = wsUserOrUsers[0] + } else { + await auditService.recordLoginActivity(user.email || '', LoginActivityCode.NO_ASSIGNED_WORKSPACE, 'Login Failed') + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, WorkspaceUserErrorMessage.WORKSPACE_USER_NOT_FOUND) + } + } + if (platform === Platform.ENTERPRISE) { + await auditService.recordLoginActivity(user.email, LoginActivityCode.LOGIN_SUCCESS, 'Login Success') + } + return { user, workspaceDetails: wsUserOrUsers } + } finally { + await queryRunner.release() + } + } + + public async verify(data: AccountDTO) { + data = this.initializeAccountDTO(data) + const queryRunner = this.dataSource.createQueryRunner() + await queryRunner.connect() + try { + await queryRunner.startTransaction() + if (!data.user.tempToken) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, UserErrorMessage.INVALID_TEMP_TOKEN) + const user = await this.userService.readUserByToken(data.user.tempToken, queryRunner) + if (!user) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + data.user = user + data.user.tempToken = '' + data.user.tokenExpiry = null + data.user.status = UserStatus.ACTIVE + data.user = await this.userService.saveUser(data.user, queryRunner) + await queryRunner.commitTransaction() + } catch (error) { + await queryRunner.rollbackTransaction() + throw error + } finally { + await queryRunner.release() + } + + return data + } + + public async forgotPassword(data: AccountDTO) { + data = this.initializeAccountDTO(data) + const queryRunner = this.dataSource.createQueryRunner() + await queryRunner.connect() + try { + await queryRunner.startTransaction() + const user = await this.userService.readUserByEmail(data.user.email, queryRunner) + if (!user) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + + data.user = user + data.user.tempToken = generateTempToken() + const tokenExpiry = new Date() + const expiryInMins = process.env.PASSWORD_RESET_TOKEN_EXPIRY_IN_MINUTES + ? 
parseInt(process.env.PASSWORD_RESET_TOKEN_EXPIRY_IN_MINUTES) + : 15 + tokenExpiry.setMinutes(tokenExpiry.getMinutes() + expiryInMins) + data.user.tokenExpiry = tokenExpiry + data.user = await this.userService.saveUser(data.user, queryRunner) + const resetLink = `${process.env.APP_URL}/reset-password?token=${data.user.tempToken}` + await sendPasswordResetEmail(data.user.email!, resetLink) + await queryRunner.commitTransaction() + } catch (error) { + await queryRunner.rollbackTransaction() + throw error + } finally { + await queryRunner.release() + } + + return data + } + + public async resetPassword(data: AccountDTO) { + data = this.initializeAccountDTO(data) + const queryRunner = this.dataSource.createQueryRunner() + await queryRunner.connect() + try { + const user = await this.userService.readUserByEmail(data.user.email, queryRunner) + if (!user) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + if (user.tempToken !== data.user.tempToken) + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, UserErrorMessage.INVALID_TEMP_TOKEN) + + const tokenExpiry = user.tokenExpiry + const now = moment() + const expiryInMins = process.env.PASSWORD_RESET_TOKEN_EXPIRY_IN_MINUTES + ? parseInt(process.env.PASSWORD_RESET_TOKEN_EXPIRY_IN_MINUTES) + : 15 + const diff = now.diff(tokenExpiry, 'minutes') + if (Math.abs(diff) > expiryInMins) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, UserErrorMessage.EXPIRED_TEMP_TOKEN) + + // all checks are done, now update the user password, don't forget to hash it and do not forget to clear the temp token + // leave the user status and other details as is + const salt = bcrypt.genSaltSync(parseInt(process.env.PASSWORD_SALT_HASH_ROUNDS || '5')) + // @ts-ignore + const hash = bcrypt.hashSync(data.user.password, salt) + data.user = user + data.user.credential = hash + data.user.tempToken = '' + data.user.tokenExpiry = undefined + data.user.status = UserStatus.ACTIVE + + await queryRunner.startTransaction() + data.user = await this.userService.saveUser(data.user, queryRunner) + await queryRunner.commitTransaction() + } catch (error) { + await queryRunner.rollbackTransaction() + throw error + } finally { + await queryRunner.release() + } + + return data + } + + public async logout(user: LoggedInUser) { + const platform = this.identityManager.getPlatformType() + if (platform === Platform.ENTERPRISE) { + await auditService.recordLoginActivity( + user.email, + LoginActivityCode.LOGOUT_SUCCESS, + 'Logout Success', + user.ssoToken ? 
'SSO' : 'Email/Password' + ) + } + } +} diff --git a/packages/server/src/enterprise/services/audit/index.ts b/packages/server/src/enterprise/services/audit/index.ts new file mode 100644 index 00000000000..a25204247a0 --- /dev/null +++ b/packages/server/src/enterprise/services/audit/index.ts @@ -0,0 +1,109 @@ +import { getRunningExpressApp } from '../../../utils/getRunningExpressApp' +import { LoginActivity } from '../../database/entities/EnterpriseEntities' +import { InternalFlowiseError } from '../../../errors/internalFlowiseError' +import { StatusCodes } from 'http-status-codes' +import { getErrorMessage } from '../../../errors/utils' +import { Between, In } from 'typeorm' +import { LoginActivityCode } from '../../Interface.Enterprise' +import { Platform } from '../../../Interface' + +const PAGE_SIZE = 10 + +const aMonthAgo = () => { + const date = new Date() + date.setMonth(new Date().getMonth() - 1) + return date +} + +const setDateToStartOrEndOfDay = (dateTimeStr: string, setHours: 'start' | 'end') => { + const date = new Date(dateTimeStr) + if (isNaN(date.getTime())) { + return undefined + } + setHours === 'start' ? date.setHours(0, 0, 0, 0) : date.setHours(23, 59, 59, 999) + return date +} + +const fetchLoginActivity = async (body: any) => { + try { + const page = body.pageNo ? parseInt(body.pageNo) : 1 + const skip = (page - 1) * PAGE_SIZE + const take = PAGE_SIZE + const appServer = getRunningExpressApp() + + let fromDate + if (body.startDate) fromDate = setDateToStartOrEndOfDay(body.startDate, 'start') + + let toDate + if (body.endDate) toDate = setDateToStartOrEndOfDay(body.endDate, 'end') + + const whereCondition: any = { + attemptedDateTime: Between(fromDate ?? aMonthAgo(), toDate ?? new Date()) + } + if (body.activityCodes && body.activityCodes?.length > 0) { + whereCondition['activityCode'] = In(body.activityCodes) + } + const count = await appServer.AppDataSource.getRepository(LoginActivity).count({ + where: whereCondition + }) + const pagedResults = await appServer.AppDataSource.getRepository(LoginActivity).find({ + where: whereCondition, + order: { + attemptedDateTime: 'DESC' + }, + skip, + take + }) + return { + data: pagedResults, + count: count, + currentPage: page, + pageSize: PAGE_SIZE + } + } catch (error) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: auditService.getLoginActivity - ${getErrorMessage(error)}` + ) + } +} + +const recordLoginActivity = async (username: string, activityCode: LoginActivityCode, message: string, ssoProvider?: string) => { + try { + const appServer = getRunningExpressApp() + const platform = appServer.identityManager.getPlatformType() + if (platform !== Platform.ENTERPRISE) { + return + } + const loginMode = ssoProvider ?? 
'Email/Password' + const loginActivity = appServer.AppDataSource.getRepository(LoginActivity).create({ + username, + activityCode, + message, + loginMode + }) + const result = await appServer.AppDataSource.getRepository(LoginActivity).save(loginActivity) + return result + } catch (error) { + throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error: authService.loginActivity - ${getErrorMessage(error)}`) + } +} + +const deleteLoginActivity = async (body: any) => { + try { + const appServer = getRunningExpressApp() + + await appServer.AppDataSource.getRepository(LoginActivity).delete({ + id: In(body.selected) + }) + return 'OK' + } catch (error) { + throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error: authService.loginActivity - ${getErrorMessage(error)}`) + } +} + +export default { + recordLoginActivity, + deleteLoginActivity, + fetchLoginActivity +} diff --git a/packages/server/src/enterprise/services/login-method.service.ts b/packages/server/src/enterprise/services/login-method.service.ts new file mode 100644 index 00000000000..5523f419171 --- /dev/null +++ b/packages/server/src/enterprise/services/login-method.service.ts @@ -0,0 +1,184 @@ +import { DataSource, QueryRunner } from 'typeorm' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { isInvalidName, isInvalidUUID } from '../utils/validation.util' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { StatusCodes } from 'http-status-codes' +import { LoginMethod, LoginMethodStatus } from '../database/entities/login-method.entity' +import { decrypt, encrypt } from '../utils/encryption.util' +import { UserErrorMessage, UserService } from './user.service' +import { OrganizationErrorMessage, OrganizationService } from './organization.service' +import { IsNull } from 'typeorm' + +export const enum LoginMethodErrorMessage { + INVALID_LOGIN_METHOD_ID = 'Invalid Login Method Id', + INVALID_LOGIN_METHOD_NAME = 'Invalid Login Method Name', + INVALID_LOGIN_METHOD_STATUS = 'Invalid Login Method Status', + INVALID_LOGIN_METHOD_CONFIG = 'Invalid Login Method Config', + LOGIN_METHOD_NOT_FOUND = 'Login Method Not Found' +} + +export class LoginMethodService { + private dataSource: DataSource + private userService: UserService + private organizationService: OrganizationService + + constructor() { + const appServer = getRunningExpressApp() + this.dataSource = appServer.AppDataSource + this.userService = new UserService() + this.organizationService = new OrganizationService() + } + + public validateLoginMethodId(id: string | undefined) { + if (isInvalidUUID(id)) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, LoginMethodErrorMessage.INVALID_LOGIN_METHOD_ID) + } + + public async readLoginMethodById(id: string | undefined, queryRunner: QueryRunner) { + this.validateLoginMethodId(id) + return await queryRunner.manager.findOneBy(LoginMethod, { id }) + } + + public validateLoginMethodName(name: string | undefined) { + if (isInvalidName(name)) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, LoginMethodErrorMessage.INVALID_LOGIN_METHOD_NAME) + } + + public validateLoginMethodStatus(status: string | undefined) { + if (status && !Object.values(LoginMethodStatus).includes(status as LoginMethodStatus)) + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, LoginMethodErrorMessage.INVALID_LOGIN_METHOD_STATUS) + } + + public async readLoginMethodByOrganizationId(organizationId: string | undefined, queryRunner: QueryRunner) { + if (organizationId) { + 
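                // Organization-scoped branch: confirm the organization exists, then return its login methods.
                // The else branch below returns platform-level methods stored with a NULL organizationId.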
const organization = await this.organizationService.readOrganizationById(organizationId, queryRunner) + if (!organization) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationErrorMessage.ORGANIZATION_NOT_FOUND) + return await queryRunner.manager.findBy(LoginMethod, { organizationId }) + } else { + return await queryRunner.manager.findBy(LoginMethod, { organizationId: IsNull() }) + } + } + + public async encryptLoginMethodConfig(config: string | undefined) { + if (!config) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, LoginMethodErrorMessage.INVALID_LOGIN_METHOD_STATUS) + return await encrypt(config) + } + + public async decryptLoginMethodConfig(config: string | undefined) { + if (!config) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, LoginMethodErrorMessage.INVALID_LOGIN_METHOD_STATUS) + return await decrypt(config) + } + + private async saveLoginMethod(data: Partial, queryRunner: QueryRunner) { + return await queryRunner.manager.save(LoginMethod, data) + } + + public async createLoginMethod(data: Partial) { + let queryRunner: QueryRunner | undefined + let newLoginMethod: Partial + try { + queryRunner = this.dataSource.createQueryRunner() + await queryRunner.connect() + const createdBy = await this.userService.readUserById(data.createdBy, queryRunner) + if (!createdBy) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + const organization = await this.organizationService.readOrganizationById(data.organizationId, queryRunner) + if (!organization) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationErrorMessage.ORGANIZATION_NOT_FOUND) + this.validateLoginMethodName(data.name) + this.validateLoginMethodStatus(data.status) + data.config = await this.encryptLoginMethodConfig(data.config) + data.updatedBy = createdBy.id + + newLoginMethod = await queryRunner.manager.create(LoginMethod, data) + await queryRunner.startTransaction() + newLoginMethod = await this.saveLoginMethod(newLoginMethod, queryRunner) + await queryRunner.commitTransaction() + } catch (error) { + if (queryRunner && !queryRunner.isTransactionActive) await queryRunner.rollbackTransaction() + throw error + } finally { + if (queryRunner && !queryRunner.isReleased) await queryRunner.release() + } + + return newLoginMethod + } + + public async createOrUpdateConfig(body: any) { + let organizationId: string = body.organizationId + let providers: any[] = body.providers + let userId: string = body.userId + + let queryRunner + try { + queryRunner = this.dataSource.createQueryRunner() + await queryRunner.connect() + await queryRunner.startTransaction() + const createdOrUpdatedByUser = await this.userService.readUserById(userId, queryRunner) + if (!createdOrUpdatedByUser) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + const organization = await this.organizationService.readOrganizationById(organizationId, queryRunner) + if (!organization) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationErrorMessage.ORGANIZATION_NOT_FOUND) + + for (let provider of providers) { + this.validateLoginMethodName(provider.providerName) + this.validateLoginMethodStatus(provider.status) + + const name = provider.providerName + const loginMethod = await queryRunner.manager.findOneBy(LoginMethod, { organizationId, name }) + if (loginMethod) { + /* empty */ + loginMethod.status = provider.status + loginMethod.config = await this.encryptLoginMethodConfig(JSON.stringify(provider.config)) + loginMethod.updatedBy = userId + await 
this.saveLoginMethod(loginMethod, queryRunner) + } else { + const encryptedConfig = await this.encryptLoginMethodConfig(JSON.stringify(provider.config)) + let newLoginMethod = queryRunner.manager.create(LoginMethod, { + organizationId, + name, + status: provider.status, + config: encryptedConfig, + createdBy: userId, + updatedBy: userId + }) + await this.saveLoginMethod(newLoginMethod, queryRunner) + } + } + await queryRunner.commitTransaction() + } catch (error) { + if (queryRunner) await queryRunner.rollbackTransaction() + throw error + } finally { + if (queryRunner) await queryRunner.release() + } + return { status: 'OK', organizationId: organizationId } + } + + public async updateLoginMethod(newLoginMethod: Partial) { + const queryRunner = this.dataSource.createQueryRunner() + await queryRunner.connect() + + const oldLoginMethod = await this.readLoginMethodById(newLoginMethod.id, queryRunner) + if (!oldLoginMethod) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, LoginMethodErrorMessage.LOGIN_METHOD_NOT_FOUND) + const updatedBy = await this.userService.readUserById(newLoginMethod.updatedBy, queryRunner) + if (!updatedBy) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + if (newLoginMethod.organizationId) { + const organization = await this.organizationService.readOrganizationById(newLoginMethod.organizationId, queryRunner) + if (!organization) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationErrorMessage.ORGANIZATION_NOT_FOUND) + } + if (newLoginMethod.name) this.validateLoginMethodName(newLoginMethod.name) + if (newLoginMethod.config) newLoginMethod.config = await this.encryptLoginMethodConfig(newLoginMethod.config) + if (newLoginMethod.status) this.validateLoginMethodStatus(newLoginMethod.status) + newLoginMethod.createdBy = oldLoginMethod.createdBy + + let updateLoginMethod = queryRunner.manager.merge(LoginMethod, newLoginMethod) + try { + await queryRunner.startTransaction() + updateLoginMethod = await this.saveLoginMethod(updateLoginMethod, queryRunner) + await queryRunner.commitTransaction() + } catch (error) { + await queryRunner.rollbackTransaction() + throw error + } finally { + await queryRunner.release() + } + + return updateLoginMethod + } +} diff --git a/packages/server/src/enterprise/services/organization-user.service.ts b/packages/server/src/enterprise/services/organization-user.service.ts new file mode 100644 index 00000000000..6e7eee717b2 --- /dev/null +++ b/packages/server/src/enterprise/services/organization-user.service.ts @@ -0,0 +1,336 @@ +import { StatusCodes } from 'http-status-codes' +import { DataSource, Not, QueryRunner } from 'typeorm' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { GeneralErrorMessage } from '../../utils/constants' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { OrganizationUser, OrganizationUserStatus } from '../database/entities/organization-user.entity' +import { Organization } from '../database/entities/organization.entity' +import { GeneralRole } from '../database/entities/role.entity' +import { WorkspaceUser } from '../database/entities/workspace-user.entity' +import { Workspace } from '../database/entities/workspace.entity' +import { OrganizationErrorMessage, OrganizationService } from './organization.service' +import { RoleErrorMessage, RoleService } from './role.service' +import { UserErrorMessage, UserService } from './user.service' +import { WorkspaceUserErrorMessage } from './workspace-user.service' 
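The createOrUpdateConfig flow shown above upserts one LoginMethod row per SSO provider for an organization and encrypts each provider's config (serialized to JSON) before saving. A minimal sketch of the request body it expects, as it might appear inside an async controller handler, is below; the provider name, status value, and config fields are illustrative assumptions rather than values taken from this diff — only the organizationId/userId/providers shape and the per-provider providerName, status, and config keys come from the code above.

    // Illustrative only — not part of this pull request.
    const loginMethodService = new LoginMethodService()
    await loginMethodService.createOrUpdateConfig({
        organizationId: 'org-uuid',   // placeholder; must reference an existing organization
        userId: 'admin-user-uuid',    // placeholder; user making the change, must exist
        providers: [
            {
                providerName: 'azure-ad',                      // hypothetical provider name
                status: 'enable',                              // assumed LoginMethodStatus value
                config: { clientID: '...', tenantID: '...' }   // stored encrypted as a JSON string
            }
        ]
    })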
+ +export const enum OrganizationUserErrorMessage { + INVALID_ORGANIZATION_USER_SATUS = 'Invalid Organization User Status', + ORGANIZATION_USER_ALREADY_EXISTS = 'Organization User Already Exists', + ORGANIZATION_USER_NOT_FOUND = 'Organization User Not Found' +} + +export class OrganizationUserService { + private dataSource: DataSource + private userService: UserService + private organizationService: OrganizationService + private roleService: RoleService + + constructor() { + const appServer = getRunningExpressApp() + this.dataSource = appServer.AppDataSource + this.userService = new UserService() + this.organizationService = new OrganizationService() + this.roleService = new RoleService() + } + + public validateOrganizationUserStatus(status: string | undefined) { + if (status && !Object.values(OrganizationUserStatus).includes(status as OrganizationUserStatus)) + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, OrganizationUserErrorMessage.INVALID_ORGANIZATION_USER_SATUS) + } + + public async readOrganizationUserByOrganizationIdUserId( + organizationId: string | undefined, + userId: string | undefined, + queryRunner: QueryRunner + ) { + const organization = await this.organizationService.readOrganizationById(organizationId, queryRunner) + if (!organization) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationErrorMessage.ORGANIZATION_NOT_FOUND) + const user = await this.userService.readUserById(userId, queryRunner) + if (!user) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + const ownerRole = await this.roleService.readGeneralRoleByName(GeneralRole.OWNER, queryRunner) + + const organizationUser = await queryRunner.manager + .createQueryBuilder(OrganizationUser, 'organizationUser') + .innerJoinAndSelect('organizationUser.role', 'role') + .where('organizationUser.organizationId = :organizationId', { organizationId }) + .andWhere('organizationUser.userId = :userId', { userId }) + .getOne() + + return { + organization, + organizationUser: organizationUser + ? 
{ + ...organizationUser, + isOrgOwner: organizationUser.roleId === ownerRole?.id + } + : null + } + } + + public async readOrganizationUserByWorkspaceIdUserId( + workspaceId: string | undefined, + userId: string | undefined, + queryRunner: QueryRunner + ) { + const workspace = await queryRunner.manager + .createQueryBuilder(WorkspaceUser, 'workspaceUser') + .innerJoinAndSelect('workspaceUser.workspace', 'workspace') + .innerJoinAndSelect('workspaceUser.user', 'user') + .innerJoinAndSelect('workspaceUser.role', 'role') + .where('workspace.id = :workspaceId', { workspaceId }) + .getOne() + if (!workspace) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, WorkspaceUserErrorMessage.WORKSPACE_USER_NOT_FOUND) + return await this.readOrganizationUserByOrganizationIdUserId(workspace.workspace.organizationId, userId, queryRunner) + } + + public async readOrganizationUserByOrganizationId(organizationId: string | undefined, queryRunner: QueryRunner) { + const organization = await this.organizationService.readOrganizationById(organizationId, queryRunner) + if (!organization) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationErrorMessage.ORGANIZATION_NOT_FOUND) + const ownerRole = await this.roleService.readGeneralRoleByName(GeneralRole.OWNER, queryRunner) + + const organizationUsers = await queryRunner.manager + .createQueryBuilder(OrganizationUser, 'organizationUser') + .innerJoinAndSelect('organizationUser.user', 'user') + .innerJoinAndSelect('organizationUser.role', 'role') + .where('organizationUser.organizationId = :organizationId', { organizationId }) + .getMany() + + // Get workspace user last login for all users + const workspaceUsers = await queryRunner.manager + .createQueryBuilder(WorkspaceUser, 'workspaceUser') + .where('workspaceUser.userId IN (:...userIds)', { + userIds: organizationUsers.map((user) => user.userId) + }) + .orderBy('workspaceUser.lastLogin', 'ASC') + .getMany() + + const lastLoginMap = new Map(workspaceUsers.map((wu) => [wu.userId, wu.lastLogin])) + + return await Promise.all( + organizationUsers.map(async (organizationUser) => { + const workspaceUser = await queryRunner.manager.findBy(WorkspaceUser, { + userId: organizationUser.userId, + workspace: { organizationId: organizationId } + }) + delete organizationUser.user.credential + delete organizationUser.user.tempToken + delete organizationUser.user.tokenExpiry + return { + ...organizationUser, + isOrgOwner: organizationUser.roleId === ownerRole?.id, + lastLogin: lastLoginMap.get(organizationUser.userId) || null, + roleCount: workspaceUser.length + } + }) + ) + } + + public async readOrganizationUserByOrganizationIdRoleId( + organizationId: string | undefined, + roleId: string | undefined, + queryRunner: QueryRunner + ) { + const organization = await this.organizationService.readOrganizationById(organizationId, queryRunner) + if (!organization) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationErrorMessage.ORGANIZATION_NOT_FOUND) + const role = await this.roleService.readRoleById(roleId, queryRunner) + if (!role) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, RoleErrorMessage.ROLE_NOT_FOUND) + const ownerRole = await this.roleService.readGeneralRoleByName(GeneralRole.OWNER, queryRunner) + + const orgUsers = await queryRunner.manager + .createQueryBuilder(OrganizationUser, 'organizationUser') + .innerJoinAndSelect('organizationUser.role', 'role') + .innerJoinAndSelect('organizationUser.user', 'user') + .where('organizationUser.organizationId = :organizationId', { organizationId 
}) + .andWhere('organizationUser.roleId = :roleId', { roleId }) + .getMany() + + return orgUsers.map((organizationUser) => { + delete organizationUser.user.credential + delete organizationUser.user.tempToken + delete organizationUser.user.tokenExpiry + return { + ...organizationUser, + isOrgOwner: organizationUser.roleId === ownerRole?.id + } + }) + } + + public async readOrganizationUserByUserId(userId: string | undefined, queryRunner: QueryRunner) { + const user = await this.userService.readUserById(userId, queryRunner) + if (!user) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + const ownerRole = await this.roleService.readGeneralRoleByName(GeneralRole.OWNER, queryRunner) + + const orgUsers = await queryRunner.manager + .createQueryBuilder(OrganizationUser, 'organizationUser') + .innerJoinAndSelect('organizationUser.role', 'role') + .where('organizationUser.userId = :userId', { userId }) + .getMany() + + const organizationUsers = orgUsers.map((user) => ({ + ...user, + isOrgOwner: user.roleId === ownerRole?.id + })) + + // loop through organizationUsers, get the organizationId, find the organization user with the ownerRole.id, and get the user's details + for (const user of organizationUsers) { + const organizationOwner = await this.readOrganizationUserByOrganizationIdRoleId(user.organizationId, ownerRole?.id, queryRunner) + if (organizationOwner.length === 1) { + // get the user's name and email + const userDetails = await this.userService.readUserById(organizationOwner[0].userId, queryRunner) + if (userDetails) { + user.user = userDetails + } + } + } + + return organizationUsers + } + + public async readOrgUsersCountByOrgId(organizationId: string): Promise { + try { + const appServer = getRunningExpressApp() + const dbResponse = await appServer.AppDataSource.getRepository(OrganizationUser).countBy({ + organizationId + }) + return dbResponse + } catch (error) { + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, OrganizationUserErrorMessage.ORGANIZATION_USER_NOT_FOUND) + } + } + + public createNewOrganizationUser(data: Partial, queryRunner: QueryRunner) { + if (data.status) this.validateOrganizationUserStatus(data.status) + data.updatedBy = data.createdBy + + return queryRunner.manager.create(OrganizationUser, data) + } + + public async saveOrganizationUser(data: Partial, queryRunner: QueryRunner) { + return await queryRunner.manager.save(OrganizationUser, data) + } + + public async createOrganizationUser(data: Partial) { + const queryRunner = this.dataSource.createQueryRunner() + await queryRunner.connect() + + const { organization, organizationUser } = await this.readOrganizationUserByOrganizationIdUserId( + data.organizationId, + data.userId, + queryRunner + ) + if (organizationUser) + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, OrganizationUserErrorMessage.ORGANIZATION_USER_ALREADY_EXISTS) + const role = await this.roleService.readRoleIsGeneral(data.roleId, queryRunner) + if (!role) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, RoleErrorMessage.ROLE_NOT_FOUND) + const createdBy = await this.userService.readUserById(data.createdBy, queryRunner) + if (!createdBy) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + + let newOrganizationUser = this.createNewOrganizationUser(data, queryRunner) + organization.updatedBy = data.createdBy + try { + await queryRunner.startTransaction() + newOrganizationUser = await this.saveOrganizationUser(newOrganizationUser, queryRunner) + await 
this.organizationService.saveOrganization(organization, queryRunner) + await queryRunner.commitTransaction() + } catch (error) { + await queryRunner.rollbackTransaction() + throw error + } finally { + await queryRunner.release() + } + + return newOrganizationUser + } + + public async createOrganization(data: Partial) { + const queryRunner = this.dataSource.createQueryRunner() + await queryRunner.connect() + + const user = await this.userService.readUserById(data.createdBy, queryRunner) + if (!user) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + + let newOrganization = this.organizationService.createNewOrganization(data, queryRunner) + + const role = await this.roleService.readGeneralRoleByName(GeneralRole.OWNER, queryRunner) + if (!role) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, RoleErrorMessage.ROLE_NOT_FOUND) + let newOrganizationUser: Partial = { + organizationId: newOrganization.id, + userId: user.id, + roleId: role.id, + createdBy: user.id + } + newOrganizationUser = this.createNewOrganizationUser(newOrganizationUser, queryRunner) + try { + await queryRunner.startTransaction() + newOrganization = await this.organizationService.saveOrganization(newOrganization, queryRunner) + await this.saveOrganizationUser(newOrganizationUser, queryRunner) + await queryRunner.commitTransaction() + } catch (error) { + await queryRunner.rollbackTransaction() + throw error + } finally { + await queryRunner.release() + } + + return newOrganization + } + + public async updateOrganizationUser(newOrganizationUser: Partial) { + const queryRunner = this.dataSource.createQueryRunner() + await queryRunner.connect() + + const { organizationUser } = await this.readOrganizationUserByOrganizationIdUserId( + newOrganizationUser.organizationId, + newOrganizationUser.userId, + queryRunner + ) + if (!organizationUser) + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationUserErrorMessage.ORGANIZATION_USER_NOT_FOUND) + + if (newOrganizationUser.roleId) { + const role = await this.roleService.readRoleIsGeneral(newOrganizationUser.roleId, queryRunner) + if (!role) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, RoleErrorMessage.ROLE_NOT_FOUND) + } + + if (newOrganizationUser.status) this.validateOrganizationUserStatus(newOrganizationUser.status) + + newOrganizationUser.createdBy = organizationUser.createdBy + + let updateOrganizationUser = queryRunner.manager.merge(OrganizationUser, organizationUser, newOrganizationUser) + try { + await queryRunner.startTransaction() + updateOrganizationUser = await this.saveOrganizationUser(updateOrganizationUser, queryRunner) + await queryRunner.commitTransaction() + } catch (error) { + await queryRunner.rollbackTransaction() + throw error + } finally { + await queryRunner.release() + } + + return updateOrganizationUser + } + + public async deleteOrganizationUser(queryRunner: QueryRunner, organizationId: string | undefined, userId: string | undefined) { + const { organizationUser } = await this.readOrganizationUserByOrganizationIdUserId(organizationId, userId, queryRunner) + if (!organizationUser) + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationUserErrorMessage.ORGANIZATION_USER_NOT_FOUND) + const role = await this.roleService.readRoleById(organizationUser.roleId, queryRunner) + if (!role) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, RoleErrorMessage.ROLE_NOT_FOUND) + if (role.name === GeneralRole.OWNER) + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, 
GeneralErrorMessage.NOT_ALLOWED_TO_DELETE_OWNER) + + const rolePersonalWorkspace = await this.roleService.readGeneralRoleByName(GeneralRole.PERSONAL_WORKSPACE, queryRunner) + const organizationWorkspaces = await queryRunner.manager.findBy(Workspace, { organizationId }) + const workspaceUserToDelete = organizationWorkspaces.map((organizationWorkspace) => ({ + workspaceId: organizationWorkspace.id, + userId: organizationUser.userId, + roleId: Not(rolePersonalWorkspace.id) + })) + + await queryRunner.manager.delete(OrganizationUser, { organizationId, userId }) + await queryRunner.manager.delete(WorkspaceUser, workspaceUserToDelete) + + return organizationUser + } +} diff --git a/packages/server/src/enterprise/services/organization.service.ts b/packages/server/src/enterprise/services/organization.service.ts new file mode 100644 index 00000000000..9ee11546712 --- /dev/null +++ b/packages/server/src/enterprise/services/organization.service.ts @@ -0,0 +1,121 @@ +import { StatusCodes } from 'http-status-codes' +import { DataSource, QueryRunner } from 'typeorm' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { generateId } from '../../utils' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { Telemetry } from '../../utils/telemetry' +import { Organization, OrganizationName } from '../database/entities/organization.entity' +import { isInvalidName, isInvalidUUID } from '../utils/validation.util' +import { UserErrorMessage, UserService } from './user.service' + +export const enum OrganizationErrorMessage { + INVALID_ORGANIZATION_ID = 'Invalid Organization Id', + INVALID_ORGANIZATION_NAME = 'Invalid Organization Name', + ORGANIZATION_NOT_FOUND = 'Organization Not Found', + ORGANIZATION_FOUND_MULTIPLE = 'Organization Found Multiple', + ORGANIZATION_RESERVERD_NAME = 'Organization name cannot be Default Organization - this is a reserved name' +} + +export class OrganizationService { + private telemetry: Telemetry + private dataSource: DataSource + private userService: UserService + + constructor() { + const appServer = getRunningExpressApp() + this.dataSource = appServer.AppDataSource + this.telemetry = appServer.telemetry + this.userService = new UserService() + } + + public validateOrganizationId(id: string | undefined) { + if (isInvalidUUID(id)) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, OrganizationErrorMessage.INVALID_ORGANIZATION_ID) + } + + public async readOrganizationById(id: string | undefined, queryRunner: QueryRunner) { + this.validateOrganizationId(id) + return await queryRunner.manager.findOneBy(Organization, { id }) + } + + public validateOrganizationName(name: string | undefined, isRegister: boolean = false) { + if (isInvalidName(name)) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, OrganizationErrorMessage.INVALID_ORGANIZATION_NAME) + if (!isRegister && name === OrganizationName.DEFAULT_ORGANIZATION) { + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, OrganizationErrorMessage.ORGANIZATION_RESERVERD_NAME) + } + } + + public async readOrganizationByName(name: string | undefined, queryRunner: QueryRunner) { + this.validateOrganizationName(name) + return await queryRunner.manager.findOneBy(Organization, { name }) + } + + public async countOrganizations(queryRunner: QueryRunner) { + return await queryRunner.manager.count(Organization) + } + + public async readOrganization(queryRunner: QueryRunner) { + return await queryRunner.manager.find(Organization) + } + + public createNewOrganization(data: 
Partial, queryRunner: QueryRunner, isRegister: boolean = false) { + this.validateOrganizationName(data.name, isRegister) + data.updatedBy = data.createdBy + data.id = generateId() + + return queryRunner.manager.create(Organization, data) + } + + public async saveOrganization(data: Partial, queryRunner: QueryRunner) { + return await queryRunner.manager.save(Organization, data) + } + + public async createOrganization(data: Partial) { + const queryRunner = this.dataSource.createQueryRunner() + await queryRunner.connect() + + const user = await this.userService.readUserById(data.createdBy, queryRunner) + if (!user) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + + let newOrganization = this.createNewOrganization(data, queryRunner) + try { + await queryRunner.startTransaction() + newOrganization = await this.saveOrganization(newOrganization, queryRunner) + await queryRunner.commitTransaction() + } catch (error) { + await queryRunner.rollbackTransaction() + throw error + } finally { + await queryRunner.release() + } + + return newOrganization + } + + public async updateOrganization(newOrganizationData: Partial) { + const queryRunner = this.dataSource.createQueryRunner() + await queryRunner.connect() + + const oldOrganizationData = await this.readOrganizationById(newOrganizationData.id, queryRunner) + if (!oldOrganizationData) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationErrorMessage.ORGANIZATION_NOT_FOUND) + const user = await this.userService.readUserById(newOrganizationData.updatedBy, queryRunner) + if (!user) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + if (newOrganizationData.name) { + this.validateOrganizationName(newOrganizationData.name) + } + newOrganizationData.createdBy = oldOrganizationData.createdBy + + let updateOrganization = queryRunner.manager.merge(Organization, oldOrganizationData, newOrganizationData) + try { + await queryRunner.startTransaction() + await this.saveOrganization(updateOrganization, queryRunner) + await queryRunner.commitTransaction() + } catch (error) { + await queryRunner.rollbackTransaction() + throw error + } finally { + await queryRunner.release() + } + + return updateOrganization + } +} diff --git a/packages/server/src/enterprise/services/role.service.ts b/packages/server/src/enterprise/services/role.service.ts new file mode 100644 index 00000000000..e2fed1b814d --- /dev/null +++ b/packages/server/src/enterprise/services/role.service.ts @@ -0,0 +1,169 @@ +import { StatusCodes } from 'http-status-codes' +import { DataSource, IsNull, QueryRunner } from 'typeorm' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { GeneralSuccessMessage } from '../../utils/constants' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { Role } from '../database/entities/role.entity' +import { WorkspaceUser } from '../database/entities/workspace-user.entity' +import { isInvalidName, isInvalidUUID } from '../utils/validation.util' +import { OrganizationErrorMessage, OrganizationService } from './organization.service' +import { UserErrorMessage, UserService } from './user.service' + +export const enum RoleErrorMessage { + INVALID_ROLE_ID = 'Invalid Role Id', + INVALID_ROLE_NAME = 'Invalid Role Name', + INVALID_ROLE_PERMISSIONS = 'Invalid Role Permissions', + ROLE_NOT_FOUND = 'Role Not Found' +} + +export class RoleService { + private dataSource: DataSource + private userService: UserService + private 
organizationService: OrganizationService + + constructor() { + const appServer = getRunningExpressApp() + this.dataSource = appServer.AppDataSource + this.userService = new UserService() + this.organizationService = new OrganizationService() + } + + public validateRoleId(id: string | undefined) { + if (isInvalidUUID(id)) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, RoleErrorMessage.INVALID_ROLE_ID) + } + + public async readRoleById(id: string | undefined, queryRunner: QueryRunner) { + this.validateRoleId(id) + return await queryRunner.manager.findOneBy(Role, { id }) + } + + public validateRoleName(name: string | undefined) { + if (isInvalidName(name)) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, RoleErrorMessage.INVALID_ROLE_NAME) + } + + public async readRoleByOrganizationId(organizationId: string | undefined, queryRunner: QueryRunner) { + const organization = await this.organizationService.readOrganizationById(organizationId, queryRunner) + if (!organization) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationErrorMessage.ORGANIZATION_NOT_FOUND) + + const roles = await queryRunner.manager.findBy(Role, { organizationId }) + return await Promise.all( + roles.map(async (role) => { + const workspaceUser = await queryRunner.manager.findBy(WorkspaceUser, { roleId: role.id }) + const userCount = workspaceUser.length + return { ...role, userCount } as Role & { userCount: number } + }) + ) + } + + public async readRoleByRoleIdOrganizationId(id: string | undefined, organizationId: string | undefined, queryRunner: QueryRunner) { + this.validateRoleId(id) + const organization = await this.organizationService.readOrganizationById(organizationId, queryRunner) + if (!organization) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationErrorMessage.ORGANIZATION_NOT_FOUND) + + return await queryRunner.manager.findOneBy(Role, { id, organizationId }) + } + + public async readGeneralRoleByName(name: string | undefined, queryRunner: QueryRunner) { + this.validateRoleName(name) + const generalRole = await queryRunner.manager.findOneBy(Role, { name, organizationId: IsNull() }) + if (!generalRole) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, RoleErrorMessage.ROLE_NOT_FOUND) + return generalRole + } + + public async readRoleIsGeneral(id: string | undefined, queryRunner: QueryRunner) { + this.validateRoleId(id) + return await queryRunner.manager.findOneBy(Role, { id, organizationId: IsNull() }) + } + + public async readRoleByGeneral(queryRunner: QueryRunner) { + const generalRoles = await queryRunner.manager.find(Role, { where: { organizationId: IsNull() } }) + if (generalRoles.length <= 0) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, RoleErrorMessage.ROLE_NOT_FOUND) + return generalRoles + } + + public async readRole(queryRunner: QueryRunner) { + return await queryRunner.manager.find(Role) + } + + public async saveRole(data: Partial, queryRunner: QueryRunner) { + return await queryRunner.manager.save(Role, data) + } + + public async createRole(data: Partial) { + const queryRunner = this.dataSource.createQueryRunner() + await queryRunner.connect() + + const user = await this.userService.readUserById(data.createdBy, queryRunner) + if (!user) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + const organization = await this.organizationService.readOrganizationById(data.organizationId, queryRunner) + if (!organization) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, 
OrganizationErrorMessage.ORGANIZATION_NOT_FOUND) + this.validateRoleName(data.name) + if (!data.permissions) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, RoleErrorMessage.INVALID_ROLE_PERMISSIONS) + data.updatedBy = data.createdBy + + let newRole = queryRunner.manager.create(Role, data) + try { + await queryRunner.startTransaction() + newRole = await this.saveRole(newRole, queryRunner) + await queryRunner.commitTransaction() + } catch (error) { + await queryRunner.rollbackTransaction() + throw error + } finally { + await queryRunner.release() + } + + return newRole + } + + public async updateRole(newRole: Partial) { + const queryRunner = this.dataSource.createQueryRunner() + await queryRunner.connect() + + const oldRole = await this.readRoleById(newRole.id, queryRunner) + if (!oldRole) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, RoleErrorMessage.ROLE_NOT_FOUND) + const user = await this.userService.readUserById(newRole.updatedBy, queryRunner) + if (!user) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + if (newRole.name) this.validateRoleName(newRole.name) + newRole.organizationId = oldRole.organizationId + newRole.createdBy = oldRole.createdBy + + let updateRole = queryRunner.manager.merge(Role, oldRole, newRole) + try { + await queryRunner.startTransaction() + updateRole = await this.saveRole(updateRole, queryRunner) + await queryRunner.commitTransaction() + } catch (error) { + await queryRunner.rollbackTransaction() + throw error + } finally { + await queryRunner.release() + } + + return updateRole + } + + public async deleteRole(organizationId: string | undefined, roleId: string | undefined) { + const queryRunner = this.dataSource.createQueryRunner() + try { + await queryRunner.connect() + + const role = await this.readRoleByRoleIdOrganizationId(roleId, organizationId, queryRunner) + if (!role) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, RoleErrorMessage.ROLE_NOT_FOUND) + + await queryRunner.startTransaction() + + await queryRunner.manager.delete(WorkspaceUser, { roleId }) + await queryRunner.manager.delete(Role, { id: roleId }) + + await queryRunner.commitTransaction() + + return { message: GeneralSuccessMessage.DELETED } + } catch (error) { + if (queryRunner.isTransactionActive) await queryRunner.rollbackTransaction() + throw error + } finally { + if (!queryRunner.isReleased) await queryRunner.release() + } + } +} diff --git a/packages/server/src/enterprise/services/user.service.ts b/packages/server/src/enterprise/services/user.service.ts new file mode 100644 index 00000000000..0f15b392448 --- /dev/null +++ b/packages/server/src/enterprise/services/user.service.ts @@ -0,0 +1,179 @@ +import { StatusCodes } from 'http-status-codes' +import bcrypt from 'bcryptjs' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { Telemetry, TelemetryEventType } from '../../utils/telemetry' +import { User, UserStatus } from '../database/entities/user.entity' +import { isInvalidEmail, isInvalidName, isInvalidPassword, isInvalidUUID } from '../utils/validation.util' +import { DataSource, QueryRunner } from 'typeorm' +import { generateId } from '../../utils' +import { GeneralErrorMessage } from '../../utils/constants' +import { getHash } from '../utils/encryption.util' + +export const enum UserErrorMessage { + EXPIRED_TEMP_TOKEN = 'Expired Temporary Token', + INVALID_TEMP_TOKEN = 'Invalid Temporary Token', + INVALID_USER_ID = 
'Invalid User Id', + INVALID_USER_EMAIL = 'Invalid User Email', + INVALID_USER_CREDENTIAL = 'Invalid User Credential', + INVALID_USER_NAME = 'Invalid User Name', + INVALID_USER_TYPE = 'Invalid User Type', + INVALID_USER_STATUS = 'Invalid User Status', + USER_EMAIL_ALREADY_EXISTS = 'User Email Already Exists', + USER_EMAIL_UNVERIFIED = 'User Email Unverified', + USER_NOT_FOUND = 'User Not Found', + USER_FOUND_MULTIPLE = 'User Found Multiple', + INCORRECT_USER_EMAIL_OR_CREDENTIALS = 'Incorrect Email or Password' +} +export class UserService { + private telemetry: Telemetry + private dataSource: DataSource + + constructor() { + const appServer = getRunningExpressApp() + this.dataSource = appServer.AppDataSource + this.telemetry = appServer.telemetry + } + + public validateUserId(id: string | undefined) { + if (isInvalidUUID(id)) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, UserErrorMessage.INVALID_USER_ID) + } + + public async readUserById(id: string | undefined, queryRunner: QueryRunner) { + this.validateUserId(id) + return await queryRunner.manager.findOneBy(User, { id }) + } + + public validateUserName(name: string | undefined) { + if (isInvalidName(name)) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, UserErrorMessage.INVALID_USER_NAME) + } + + public validateUserEmail(email: string | undefined) { + if (isInvalidEmail(email)) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, UserErrorMessage.INVALID_USER_EMAIL) + } + + public async readUserByEmail(email: string | undefined, queryRunner: QueryRunner) { + this.validateUserEmail(email) + return await queryRunner.manager.findOneBy(User, { email }) + } + + public async readUserByToken(token: string | undefined, queryRunner: QueryRunner) { + return await queryRunner.manager.findOneBy(User, { tempToken: token }) + } + + public validateUserStatus(status: string | undefined) { + if (status && !Object.values(UserStatus).includes(status as UserStatus)) + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, UserErrorMessage.INVALID_USER_STATUS) + } + + public async readUser(queryRunner: QueryRunner) { + return await queryRunner.manager.find(User) + } + + public encryptUserCredential(credential: string | undefined) { + if (!credential || isInvalidPassword(credential)) + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, GeneralErrorMessage.INVALID_PASSWORD) + return getHash(credential) + } + + public async createNewUser(data: Partial, queryRunner: QueryRunner) { + const user = await this.readUserByEmail(data.email, queryRunner) + if (user) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, UserErrorMessage.USER_EMAIL_ALREADY_EXISTS) + if (data.credential) data.credential = this.encryptUserCredential(data.credential) + if (!data.name) data.name = data.email + this.validateUserName(data.name) + if (data.status) this.validateUserStatus(data.status) + + data.id = generateId() + if (data.createdBy) { + const createdBy = await this.readUserById(data.createdBy, queryRunner) + if (!createdBy) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + data.createdBy = createdBy.id + data.updatedBy = data.createdBy + } else { + data.createdBy = data.id + data.updatedBy = data.id + } + + return queryRunner.manager.create(User, data) + } + + public async saveUser(data: Partial, queryRunner: QueryRunner) { + return await queryRunner.manager.save(User, data) + } + + public async createUser(data: Partial) { + const queryRunner = this.dataSource.createQueryRunner() + await queryRunner.connect() + + 
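        // createNewUser runs outside the transaction: it enforces a unique email, hashes the
        // credential, defaults the name to the email, and resolves createdBy/updatedBy. Only the
        // save below is wrapped in startTransaction, with rollback on error and release in finally;
        // telemetry is sent once the user has been persisted.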
let newUser = await this.createNewUser(data, queryRunner) + try { + await queryRunner.startTransaction() + newUser = await this.saveUser(newUser, queryRunner) + await queryRunner.commitTransaction() + } catch (error) { + await queryRunner.rollbackTransaction() + throw error + } finally { + await queryRunner.release() + } + + this.telemetry.sendTelemetry( + TelemetryEventType.USER_CREATED, + { + userId: newUser.id, + createdBy: newUser.createdBy + }, + newUser.id + ) + + return newUser + } + + public async updateUser(newUserData: Partial & { password?: string }) { + let queryRunner: QueryRunner | undefined + let updatedUser: Partial + try { + queryRunner = this.dataSource.createQueryRunner() + await queryRunner.connect() + const oldUserData = await this.readUserById(newUserData.id, queryRunner) + if (!oldUserData) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + + if (newUserData.updatedBy) { + const updateUserData = await this.readUserById(newUserData.updatedBy, queryRunner) + if (!updateUserData) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + } + + newUserData.createdBy = oldUserData.createdBy + + if (newUserData.name) { + this.validateUserName(newUserData.name) + } + + if (newUserData.status) { + this.validateUserStatus(newUserData.status) + } + + if (newUserData.password) { + const salt = bcrypt.genSaltSync(parseInt(process.env.PASSWORD_SALT_HASH_ROUNDS || '5')) + // @ts-ignore + const hash = bcrypt.hashSync(newUserData.password, salt) + newUserData.credential = hash + newUserData.tempToken = '' + newUserData.tokenExpiry = undefined + } + + updatedUser = queryRunner.manager.merge(User, oldUserData, newUserData) + await queryRunner.startTransaction() + await this.saveUser(updatedUser, queryRunner) + await queryRunner.commitTransaction() + } catch (error) { + if (queryRunner && queryRunner.isTransactionActive) await queryRunner.rollbackTransaction() + throw error + } finally { + if (queryRunner && !queryRunner.isReleased) await queryRunner.release() + } + + return updatedUser + } +} diff --git a/packages/server/src/enterprise/services/workspace-user.service.ts b/packages/server/src/enterprise/services/workspace-user.service.ts new file mode 100644 index 00000000000..af80b497f6c --- /dev/null +++ b/packages/server/src/enterprise/services/workspace-user.service.ts @@ -0,0 +1,392 @@ +import { StatusCodes } from 'http-status-codes' +import { DataSource, QueryRunner } from 'typeorm' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { GeneralErrorMessage, GeneralSuccessMessage } from '../../utils/constants' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { OrganizationUser } from '../database/entities/organization-user.entity' +import { GeneralRole } from '../database/entities/role.entity' +import { WorkspaceUser, WorkspaceUserStatus } from '../database/entities/workspace-user.entity' +import { Workspace } from '../database/entities/workspace.entity' +import { isInvalidDateTime } from '../utils/validation.util' +import { OrganizationUserErrorMessage } from './organization-user.service' +import { OrganizationErrorMessage, OrganizationService } from './organization.service' +import { RoleErrorMessage, RoleService } from './role.service' +import { UserErrorMessage, UserService } from './user.service' +import { WorkspaceErrorMessage, WorkspaceService } from './workspace.service' + +export const enum WorkspaceUserErrorMessage { + 
INVALID_WORKSPACE_USER_SATUS = 'Invalid Workspace User Status', + INVALID_WORKSPACE_USER_LASTLOGIN = 'Invalid Workspace User LastLogin', + WORKSPACE_USER_ALREADY_EXISTS = 'Workspace User Already Exists', + WORKSPACE_USER_NOT_FOUND = 'Workspace User Not Found' +} + +export class WorkspaceUserService { + private dataSource: DataSource + private userService: UserService + private workspaceService: WorkspaceService + private roleService: RoleService + private organizationService: OrganizationService + + constructor() { + const appServer = getRunningExpressApp() + this.dataSource = appServer.AppDataSource + this.userService = new UserService() + this.workspaceService = new WorkspaceService() + this.roleService = new RoleService() + this.organizationService = new OrganizationService() + } + + public validateWorkspaceUserStatus(status: string | undefined) { + if (status && !Object.values(WorkspaceUserStatus).includes(status as WorkspaceUserStatus)) + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, WorkspaceUserErrorMessage.INVALID_WORKSPACE_USER_SATUS) + } + + public validateWorkspaceUserLastLogin(lastLogin: string | undefined) { + if (isInvalidDateTime(lastLogin)) + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, WorkspaceUserErrorMessage.INVALID_WORKSPACE_USER_LASTLOGIN) + } + + public async readWorkspaceUserByWorkspaceIdUserId( + workspaceId: string | undefined, + userId: string | undefined, + queryRunner: QueryRunner + ) { + const workspace = await this.workspaceService.readWorkspaceById(workspaceId, queryRunner) + if (!workspace) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, WorkspaceErrorMessage.WORKSPACE_NOT_FOUND) + const user = await this.userService.readUserById(userId, queryRunner) + if (!user) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + const ownerRole = await this.roleService.readGeneralRoleByName(GeneralRole.OWNER, queryRunner) + + const workspaceUser = await queryRunner.manager + .createQueryBuilder(WorkspaceUser, 'workspaceUser') + .innerJoinAndSelect('workspaceUser.role', 'role') + .where('workspaceUser.workspaceId = :workspaceId', { workspaceId }) + .andWhere('workspaceUser.userId = :userId', { userId }) + .getOne() + + return { + workspace, + workspaceUser: workspaceUser + ? 
{ + ...workspaceUser, + isOrgOwner: workspaceUser.roleId === ownerRole?.id + } + : null + } + } + + public async readWorkspaceUserByWorkspaceId(workspaceId: string | undefined, queryRunner: QueryRunner) { + const workspace = await this.workspaceService.readWorkspaceById(workspaceId, queryRunner) + if (!workspace) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, WorkspaceErrorMessage.WORKSPACE_NOT_FOUND) + const ownerRole = await this.roleService.readGeneralRoleByName(GeneralRole.OWNER, queryRunner) + + const workspaceUsers = await queryRunner.manager + .createQueryBuilder(WorkspaceUser, 'workspaceUser') + .innerJoinAndSelect('workspaceUser.role', 'role') + .innerJoinAndSelect('workspaceUser.user', 'user') + .where('workspaceUser.workspaceId = :workspaceId', { workspaceId }) + .getMany() + + return workspaceUsers.map((workspaceUser) => { + delete workspaceUser.user.credential + delete workspaceUser.user.tempToken + delete workspaceUser.user.tokenExpiry + return { + ...workspaceUser, + isOrgOwner: workspaceUser.roleId === ownerRole?.id + } + }) + } + + public async readWorkspaceUserByUserId(userId: string | undefined, queryRunner: QueryRunner) { + const user = await this.userService.readUserById(userId, queryRunner) + if (!user) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + const ownerRole = await this.roleService.readGeneralRoleByName(GeneralRole.OWNER, queryRunner) + + const workspaceUsers = await queryRunner.manager + .createQueryBuilder(WorkspaceUser, 'workspaceUser') + .innerJoinAndSelect('workspaceUser.workspace', 'workspace') + .innerJoinAndSelect('workspaceUser.role', 'role') + .where('workspaceUser.userId = :userId', { userId }) + .getMany() + + return workspaceUsers.map((user) => ({ + ...user, + isOrgOwner: user.roleId === ownerRole?.id + })) + } + + public async readWorkspaceUserByOrganizationIdUserId( + organizationId: string | undefined, + userId: string | undefined, + queryRunner: QueryRunner + ) { + const user = await this.userService.readUserById(userId, queryRunner) + if (!user) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + const organization = await this.organizationService.readOrganizationById(organizationId, queryRunner) + if (!organization) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationErrorMessage.ORGANIZATION_NOT_FOUND) + const ownerRole = await this.roleService.readGeneralRoleByName(GeneralRole.OWNER, queryRunner) + + const workspaceUsers = await queryRunner.manager + .createQueryBuilder(WorkspaceUser, 'workspaceUser') + .innerJoinAndSelect('workspaceUser.workspace', 'workspace') + .innerJoinAndSelect('workspaceUser.role', 'role') + .where('workspace.organizationId = :organizationId', { organizationId }) + .andWhere('workspaceUser.userId = :userId', { userId }) + .getMany() + + return workspaceUsers.map((user) => ({ + ...user, + isOrgOwner: user.roleId === ownerRole?.id + })) + } + + public async readWorkspaceUserByOrganizationId(organizationId: string | undefined, queryRunner: QueryRunner) { + const organization = await this.organizationService.readOrganizationById(organizationId, queryRunner) + if (!organization) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationErrorMessage.ORGANIZATION_NOT_FOUND) + const ownerRole = await this.roleService.readGeneralRoleByName(GeneralRole.OWNER, queryRunner) + + const workspaceUsers = await queryRunner.manager + .createQueryBuilder(WorkspaceUser, 'workspaceUser') + 
.innerJoinAndSelect('workspaceUser.workspace', 'workspace') + .innerJoinAndSelect('workspaceUser.user', 'user') + .innerJoinAndSelect('workspaceUser.role', 'role') + .where('workspace.organizationId = :organizationId', { organizationId }) + .getMany() + + return workspaceUsers.map((user) => ({ + ...user, + isOrgOwner: user.roleId === ownerRole?.id + })) + } + + public async readWorkspaceUserByRoleId(roleId: string | undefined, queryRunner: QueryRunner) { + const role = await this.roleService.readRoleById(roleId, queryRunner) + if (!role) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, RoleErrorMessage.ROLE_NOT_FOUND) + const ownerRole = await this.roleService.readGeneralRoleByName(GeneralRole.OWNER, queryRunner) + + const workspaceUsers = await queryRunner.manager + .createQueryBuilder(WorkspaceUser, 'workspaceUser') + .innerJoinAndSelect('workspaceUser.workspace', 'workspace') + .innerJoinAndSelect('workspaceUser.user', 'user') + .innerJoinAndSelect('workspaceUser.role', 'role') + .where('workspaceUser.roleId = :roleId', { roleId }) + .getMany() + + return workspaceUsers.map((workspaceUser) => { + delete workspaceUser.user.credential + delete workspaceUser.user.tempToken + delete workspaceUser.user.tokenExpiry + return { + ...workspaceUser, + isOrgOwner: workspaceUser.roleId === ownerRole?.id + } + }) + } + + public async readWorkspaceUserByLastLogin(userId: string | undefined, queryRunner: QueryRunner) { + const user = await this.userService.readUserById(userId, queryRunner) + if (!user) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + const ownerRole = await this.roleService.readGeneralRoleByName(GeneralRole.OWNER, queryRunner) + + let workspaceUser = await queryRunner.manager + .createQueryBuilder(WorkspaceUser, 'workspaceUser') + .innerJoinAndSelect('workspaceUser.workspace', 'workspace') + .innerJoinAndSelect('workspaceUser.role', 'role') + .where('workspaceUser.userId = :userId', { userId }) + .andWhere('workspaceUser.lastLogin IS NOT NULL') + .orderBy('workspaceUser.lastLogin', 'DESC') + .take(1) + .getOne() + + if (!workspaceUser) return await this.readWorkspaceUserByUserId(userId, queryRunner) + + return { + ...workspaceUser, + isOrgOwner: workspaceUser.roleId === ownerRole?.id + } + } + + public createNewWorkspaceUser(data: Partial, queryRunner: QueryRunner) { + if (data.status) this.validateWorkspaceUserStatus(data.status) + data.updatedBy = data.createdBy + + return queryRunner.manager.create(WorkspaceUser, data) + } + + public async saveWorkspaceUser(data: Partial, queryRunner: QueryRunner) { + return await queryRunner.manager.save(WorkspaceUser, data) + } + + public async createWorkspaceUser(data: Partial) { + const queryRunner = this.dataSource.createQueryRunner() + await queryRunner.connect() + + const { workspace, workspaceUser } = await this.readWorkspaceUserByWorkspaceIdUserId(data.workspaceId, data.userId, queryRunner) + if (workspaceUser) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, WorkspaceUserErrorMessage.WORKSPACE_USER_ALREADY_EXISTS) + const role = await this.roleService.readRoleById(data.roleId, queryRunner) + if (!role) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, RoleErrorMessage.ROLE_NOT_FOUND) + const createdBy = await this.userService.readUserById(data.createdBy, queryRunner) + if (!createdBy) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + + let newWorkspaceUser = this.createNewWorkspaceUser(data, queryRunner) + workspace.updatedBy = data.createdBy + 
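        // The new membership row, the parent workspace (updatedBy bumped above), and the role are
        // saved together in the transaction below, so a failure on any of them rolls back all three.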
try { + await queryRunner.startTransaction() + newWorkspaceUser = await this.saveWorkspaceUser(newWorkspaceUser, queryRunner) + await this.workspaceService.saveWorkspace(workspace, queryRunner) + await this.roleService.saveRole(role, queryRunner) + await queryRunner.commitTransaction() + } catch (error) { + await queryRunner.rollbackTransaction() + throw error + } finally { + await queryRunner.release() + } + + return newWorkspaceUser + } + + public async createWorkspace(data: Partial) { + const queryRunner = this.dataSource.createQueryRunner() + await queryRunner.connect() + + const organization = await this.organizationService.readOrganizationById(data.organizationId, queryRunner) + if (!organization) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationErrorMessage.ORGANIZATION_NOT_FOUND) + + const user = await this.userService.readUserById(data.createdBy, queryRunner) + if (!user) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + + let organizationUser = await queryRunner.manager.findOneBy(OrganizationUser, { organizationId: organization.id, userId: user.id }) + if (!organizationUser) + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationUserErrorMessage.ORGANIZATION_USER_NOT_FOUND) + organizationUser.updatedBy = user.id + + let newWorkspace = this.workspaceService.createNewWorkspace(data, queryRunner) + + const ownerRole = await this.roleService.readGeneralRoleByName(GeneralRole.OWNER, queryRunner) + if (!ownerRole) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, RoleErrorMessage.ROLE_NOT_FOUND) + + const role = await this.roleService.readRoleById(organizationUser.roleId, queryRunner) + if (!role) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, RoleErrorMessage.ROLE_NOT_FOUND) + + // Add org admin as workspace owner if the user creating the workspace is NOT the org admin + const orgAdmin = await queryRunner.manager.findOneBy(OrganizationUser, { + organizationId: organization.id, + roleId: ownerRole.id + }) + if (!orgAdmin) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationUserErrorMessage.ORGANIZATION_USER_NOT_FOUND) + + let isCreateWorkSpaceUserOrgAdmin = false + if (orgAdmin.userId === user.id) { + isCreateWorkSpaceUserOrgAdmin = true + } + + let orgAdminUser: Partial = { + workspaceId: newWorkspace.id, + roleId: ownerRole.id, + userId: orgAdmin.userId, + createdBy: orgAdmin.userId + } + if (!isCreateWorkSpaceUserOrgAdmin) orgAdminUser = this.createNewWorkspaceUser(orgAdminUser, queryRunner) + + let newWorkspaceUser: Partial = { + workspaceId: newWorkspace.id, + roleId: role.id, + userId: user.id, + createdBy: user.id + } + // If user creating the workspace is an invited user, not the organization admin, inherit the role from existingWorkspaceId + if ((data as any).existingWorkspaceId) { + const existingWorkspaceUser = await queryRunner.manager.findOneBy(WorkspaceUser, { + workspaceId: (data as any).existingWorkspaceId, + userId: user.id + }) + if (existingWorkspaceUser) { + newWorkspaceUser.roleId = existingWorkspaceUser.roleId + } + } + + newWorkspaceUser = this.createNewWorkspaceUser(newWorkspaceUser, queryRunner) + + try { + await queryRunner.startTransaction() + newWorkspace = await this.workspaceService.saveWorkspace(newWorkspace, queryRunner) + if (!isCreateWorkSpaceUserOrgAdmin) await this.saveWorkspaceUser(orgAdminUser, queryRunner) + await this.saveWorkspaceUser(newWorkspaceUser, queryRunner) + await queryRunner.manager.save(OrganizationUser, organizationUser) + await 
queryRunner.commitTransaction() + } catch (error) { + await queryRunner.rollbackTransaction() + throw error + } finally { + await queryRunner.release() + } + + return newWorkspace + } + + public async updateWorkspaceUser(newWorkspaserUser: Partial) { + const queryRunner = this.dataSource.createQueryRunner() + await queryRunner.connect() + + const { workspaceUser } = await this.readWorkspaceUserByWorkspaceIdUserId( + newWorkspaserUser.workspaceId, + newWorkspaserUser.userId, + queryRunner + ) + if (!workspaceUser) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, WorkspaceUserErrorMessage.WORKSPACE_USER_NOT_FOUND) + if (newWorkspaserUser.roleId) { + const role = await this.roleService.readRoleById(newWorkspaserUser.roleId, queryRunner) + if (!role) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, RoleErrorMessage.ROLE_NOT_FOUND) + } + const updatedBy = await this.userService.readUserById(newWorkspaserUser.updatedBy, queryRunner) + if (!updatedBy) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + if (newWorkspaserUser.status) this.validateWorkspaceUserStatus(newWorkspaserUser.status) + if (newWorkspaserUser.lastLogin) this.validateWorkspaceUserLastLogin(newWorkspaserUser.lastLogin) + newWorkspaserUser.createdBy = workspaceUser.createdBy + + let updataWorkspaceUser = queryRunner.manager.merge(WorkspaceUser, workspaceUser, newWorkspaserUser) + try { + await queryRunner.startTransaction() + updataWorkspaceUser = await this.saveWorkspaceUser(updataWorkspaceUser, queryRunner) + await queryRunner.commitTransaction() + } catch (error) { + await queryRunner.rollbackTransaction() + throw error + } finally { + await queryRunner.release() + } + + return updataWorkspaceUser + } + + public async deleteWorkspaceUser(workspaceId: string | undefined, userId: string | undefined) { + const queryRunner = this.dataSource.createQueryRunner() + try { + await queryRunner.connect() + const { workspace, workspaceUser } = await this.readWorkspaceUserByWorkspaceIdUserId(workspaceId, userId, queryRunner) + if (!workspaceUser) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, WorkspaceUserErrorMessage.WORKSPACE_USER_NOT_FOUND) + const role = await this.roleService.readRoleById(workspaceUser.roleId, queryRunner) + if (!role) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, RoleErrorMessage.ROLE_NOT_FOUND) + if (role.name === GeneralRole.OWNER) + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, GeneralErrorMessage.NOT_ALLOWED_TO_DELETE_OWNER) + + await queryRunner.startTransaction() + + await queryRunner.manager.delete(WorkspaceUser, { workspaceId, userId }) + await this.roleService.saveRole(role, queryRunner) + await this.workspaceService.saveWorkspace(workspace, queryRunner) + + await queryRunner.commitTransaction() + + return { message: GeneralSuccessMessage.DELETED } + } catch (error) { + if (queryRunner.isTransactionActive) await queryRunner.rollbackTransaction() + throw error + } finally { + if (!queryRunner.isReleased) await queryRunner.release() + } + } +} diff --git a/packages/server/src/enterprise/services/workspace.service.ts b/packages/server/src/enterprise/services/workspace.service.ts new file mode 100644 index 00000000000..a5c4aa58ef6 --- /dev/null +++ b/packages/server/src/enterprise/services/workspace.service.ts @@ -0,0 +1,327 @@ +import { StatusCodes } from 'http-status-codes' +import { DataSource, EntityManager, In, IsNull, QueryRunner, UpdateResult } from 'typeorm' +import { ApiKey } from '../../database/entities/ApiKey' +import { 
Assistant } from '../../database/entities/Assistant' +import { ChatFlow } from '../../database/entities/ChatFlow' +import { ChatMessage } from '../../database/entities/ChatMessage' +import { ChatMessageFeedback } from '../../database/entities/ChatMessageFeedback' +import { Credential } from '../../database/entities/Credential' +import { CustomTemplate } from '../../database/entities/CustomTemplate' +import { Dataset } from '../../database/entities/Dataset' +import { DatasetRow } from '../../database/entities/DatasetRow' +import { DocumentStore } from '../../database/entities/DocumentStore' +import { DocumentStoreFileChunk } from '../../database/entities/DocumentStoreFileChunk' +import { Evaluation } from '../../database/entities/Evaluation' +import { EvaluationRun } from '../../database/entities/EvaluationRun' +import { Evaluator } from '../../database/entities/Evaluator' +import { Execution } from '../../database/entities/Execution' +import { Tool } from '../../database/entities/Tool' +import { UpsertHistory } from '../../database/entities/UpsertHistory' +import { Variable } from '../../database/entities/Variable' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { generateId } from '../../utils' +import { GeneralSuccessMessage } from '../../utils/constants' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { WorkspaceShared } from '../database/entities/EnterpriseEntities' +import { GeneralRole } from '../database/entities/role.entity' +import { WorkspaceUser } from '../database/entities/workspace-user.entity' +import { Workspace, WorkspaceName } from '../database/entities/workspace.entity' +import { isInvalidName, isInvalidUUID } from '../utils/validation.util' +import { OrganizationErrorMessage, OrganizationService } from './organization.service' +import { RoleErrorMessage, RoleService } from './role.service' +import { UserErrorMessage, UserService } from './user.service' + +export const enum WorkspaceErrorMessage { + INVALID_WORKSPACE_ID = 'Invalid Workspace Id', + INVALID_WORKSPACE_NAME = 'Invalid Workspace Name', + WORKSPACE_NOT_FOUND = 'Workspace Not Found', + WORKSPACE_RESERVERD_NAME = 'Workspace name cannot be Default Workspace or Personal Workspace - this is a reserved name' +} + +export class WorkspaceService { + private dataSource: DataSource + private userService: UserService + private organizationService: OrganizationService + private roleService: RoleService + + constructor() { + const appServer = getRunningExpressApp() + this.dataSource = appServer.AppDataSource + this.userService = new UserService() + this.organizationService = new OrganizationService() + this.roleService = new RoleService() + } + + public validateWorkspaceId(id: string | undefined) { + if (isInvalidUUID(id)) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, WorkspaceErrorMessage.INVALID_WORKSPACE_ID) + } + + public async readWorkspaceById(id: string | undefined, queryRunner: QueryRunner) { + this.validateWorkspaceId(id) + return await queryRunner.manager.findOneBy(Workspace, { id }) + } + + public validateWorkspaceName(name: string | undefined, isRegister: boolean = false) { + if (isInvalidName(name)) throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, WorkspaceErrorMessage.INVALID_WORKSPACE_NAME) + if (!isRegister && (name === WorkspaceName.DEFAULT_PERSONAL_WORKSPACE || name === WorkspaceName.DEFAULT_WORKSPACE)) { + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, WorkspaceErrorMessage.WORKSPACE_RESERVERD_NAME) + } + } + + 
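    // Illustrative behaviour of the validators above (sketch only; assumes a running Express
    // app so the constructor can resolve the data source, and that the reserved names are
    // 'Default Workspace' and 'Personal Workspace' as the error message states):
    //
    //   const workspaceService = new WorkspaceService()
    //   workspaceService.validateWorkspaceName('Marketing Team')          // passes
    //   workspaceService.validateWorkspaceName('Default Workspace')       // throws WORKSPACE_RESERVERD_NAME
    //   workspaceService.validateWorkspaceName('Default Workspace', true) // allowed during registration
    //   workspaceService.validateWorkspaceName('x'.repeat(101))           // throws INVALID_WORKSPACE_NAME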
public async readWorkspaceByOrganizationId(organizationId: string | undefined, queryRunner: QueryRunner) { + await this.organizationService.readOrganizationById(organizationId, queryRunner) + const workspaces = await queryRunner.manager.findBy(Workspace, { organizationId }) + + const rolePersonalWorkspace = await this.roleService.readGeneralRoleByName(GeneralRole.PERSONAL_WORKSPACE, queryRunner) + if (!rolePersonalWorkspace) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, RoleErrorMessage.ROLE_NOT_FOUND) + + const filteredWorkspaces = await Promise.all( + workspaces.map(async (workspace) => { + const workspaceUsers = await queryRunner.manager.findBy(WorkspaceUser, { workspaceId: workspace.id }) + + // Skip if any user in the workspace has PERSONAL_WORKSPACE role + const hasPersonalWorkspaceUser = workspaceUsers.some((user) => user.roleId === rolePersonalWorkspace.id) + if (hasPersonalWorkspaceUser) { + return null + } + + return { + ...workspace, + userCount: workspaceUsers.length + } as Workspace & { userCount: number } + }) + ) + + // Filter out null values (personal workspaces) + return filteredWorkspaces.filter((workspace): workspace is Workspace & { userCount: number } => workspace !== null) + } + + public createNewWorkspace(data: Partial<Workspace>, queryRunner: QueryRunner, isRegister: boolean = false) { + this.validateWorkspaceName(data.name, isRegister) + data.updatedBy = data.createdBy + data.id = generateId() + + return queryRunner.manager.create(Workspace, data) + } + + public async saveWorkspace(data: Partial<Workspace>, queryRunner: QueryRunner) { + return await queryRunner.manager.save(Workspace, data) + } + + public async createWorkspace(data: Partial<Workspace>) { + const queryRunner = this.dataSource.createQueryRunner() + await queryRunner.connect() + + const organization = await this.organizationService.readOrganizationById(data.organizationId, queryRunner) + if (!organization) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, OrganizationErrorMessage.ORGANIZATION_NOT_FOUND) + const user = await this.userService.readUserById(data.createdBy, queryRunner) + if (!user) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + + let newWorkspace = this.createNewWorkspace(data, queryRunner) + try { + await queryRunner.startTransaction() + newWorkspace = await this.saveWorkspace(newWorkspace, queryRunner) + await queryRunner.commitTransaction() + } catch (error) { + await queryRunner.rollbackTransaction() + throw error + } finally { + await queryRunner.release() + } + + return newWorkspace + } + + public async updateWorkspace(newWorkspaceData: Partial<Workspace>) { + const queryRunner = this.dataSource.createQueryRunner() + await queryRunner.connect() + + const oldWorkspaceData = await this.readWorkspaceById(newWorkspaceData.id, queryRunner) + if (!oldWorkspaceData) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, WorkspaceErrorMessage.WORKSPACE_NOT_FOUND) + const user = await this.userService.readUserById(newWorkspaceData.updatedBy, queryRunner) + if (!user) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + if (newWorkspaceData.name) { + this.validateWorkspaceName(newWorkspaceData.name) + } + newWorkspaceData.organizationId = oldWorkspaceData.organizationId + newWorkspaceData.createdBy = oldWorkspaceData.createdBy + + let updateWorkspace = queryRunner.manager.merge(Workspace, oldWorkspaceData, newWorkspaceData) + try { + await queryRunner.startTransaction() + updateWorkspace = await this.saveWorkspace(updateWorkspace, queryRunner) +
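    // The commit below completes the transaction pattern used throughout this service: merge
    // the incoming fields into the existing row, save, commit, roll back on any error, and
    // release the query runner in the finally block regardless of the outcome.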
await queryRunner.commitTransaction() + } catch (error) { + await queryRunner.rollbackTransaction() + throw error + } finally { + await queryRunner.release() + } + + return updateWorkspace + } + + public async deleteWorkspaceById(queryRunner: QueryRunner, workspaceId: string) { + const workspace = await this.readWorkspaceById(workspaceId, queryRunner) + if (!workspace) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, WorkspaceErrorMessage.WORKSPACE_NOT_FOUND) + + // First get all related entities that need to be deleted + const chatflows = await queryRunner.manager.findBy(ChatFlow, { workspaceId }) + const documentStores = await queryRunner.manager.findBy(DocumentStore, { workspaceId }) + const evaluations = await queryRunner.manager.findBy(Evaluation, { workspaceId }) + const datasets = await queryRunner.manager.findBy(Dataset, { workspaceId }) + + // Extract IDs for bulk deletion + const chatflowIds = chatflows.map((cf) => cf.id) + const documentStoreIds = documentStores.map((ds) => ds.id) + const evaluationIds = evaluations.map((e) => e.id) + const datasetIds = datasets.map((d) => d.id) + + // Start deleting in the correct order to maintain referential integrity + await queryRunner.manager.delete(WorkspaceUser, { workspaceId }) + await queryRunner.manager.delete(ApiKey, { workspaceId }) + await queryRunner.manager.delete(Assistant, { workspaceId }) + await queryRunner.manager.delete(Execution, { workspaceId }) + + // Delete chatflow related entities + if (chatflowIds.length > 0) { + await queryRunner.manager.delete(ChatFlow, { workspaceId }) + await queryRunner.manager.delete(ChatMessageFeedback, { chatflowid: In(chatflowIds) }) + await queryRunner.manager.delete(ChatMessage, { chatflowid: In(chatflowIds) }) + await queryRunner.manager.delete(UpsertHistory, { chatflowid: In(chatflowIds) }) + } + + await queryRunner.manager.delete(Credential, { workspaceId }) + await queryRunner.manager.delete(CustomTemplate, { workspaceId }) + + // Delete dataset related entities + if (datasetIds.length > 0) { + await queryRunner.manager.delete(Dataset, { workspaceId }) + await queryRunner.manager.delete(DatasetRow, { datasetId: In(datasetIds) }) + } + + // Delete document store related entities + if (documentStoreIds.length > 0) { + await queryRunner.manager.delete(DocumentStore, { workspaceId }) + await queryRunner.manager.delete(DocumentStoreFileChunk, { storeId: In(documentStoreIds) }) + } + + // Delete evaluation related entities + if (evaluationIds.length > 0) { + await queryRunner.manager.delete(Evaluation, { workspaceId }) + await queryRunner.manager.delete(EvaluationRun, { evaluationId: In(evaluationIds) }) + } + + await queryRunner.manager.delete(Evaluator, { workspaceId }) + await queryRunner.manager.delete(Tool, { workspaceId }) + await queryRunner.manager.delete(Variable, { workspaceId }) + await queryRunner.manager.delete(WorkspaceShared, { workspaceId }) + + // Finally delete the workspace itself + await queryRunner.manager.delete(Workspace, { id: workspaceId }) + + return workspace + } + + public async getSharedWorkspacesForItem(itemId: string) { + const sharedWorkspaces = await this.dataSource.getRepository(WorkspaceShared).find({ + where: { + sharedItemId: itemId + } + }) + if (sharedWorkspaces.length === 0) { + return [] + } + + const workspaceIds = sharedWorkspaces.map((ws) => ws.workspaceId) + const workspaces = await this.dataSource.getRepository(Workspace).find({ + select: ['id', 'name'], + where: { id: In(workspaceIds) } + }) + + return sharedWorkspaces.map((sw) => { + const 
workspace = workspaces.find((w) => w.id === sw.workspaceId) + return { + workspaceId: sw.workspaceId, + workspaceName: workspace?.name, + sharedItemId: sw.sharedItemId, + itemType: sw.itemType + } + }) + } + + public async getSharedItemsForWorkspace(wsId: string, itemType: string) { + const sharedItems = await this.dataSource.getRepository(WorkspaceShared).find({ + where: { + workspaceId: wsId, + itemType: itemType + } + }) + if (sharedItems.length === 0) { + return [] + } + + const itemIds = sharedItems.map((item) => item.sharedItemId) + if (itemType === 'credential') { + return await this.dataSource.getRepository(Credential).find({ + select: ['id', 'name', 'credentialName'], + where: { id: In(itemIds) } + }) + } else if (itemType === 'custom_template') { + return await this.dataSource.getRepository(CustomTemplate).find({ + where: { id: In(itemIds) } + }) + } + return [] + } + + public async setSharedWorkspacesForItem(itemId: string, body: { itemType: string; workspaceIds: string[] }) { + const { itemType, workspaceIds } = body + + await this.dataSource.transaction(async (transactionalEntityManager: EntityManager) => { + // Delete existing shared workspaces for the item + await transactionalEntityManager.getRepository(WorkspaceShared).delete({ + sharedItemId: itemId + }) + + // Add new shared workspaces + const sharedWorkspaces = workspaceIds.map((workspaceId) => + transactionalEntityManager.getRepository(WorkspaceShared).create({ + workspaceId, + sharedItemId: itemId, + itemType + }) + ) + await transactionalEntityManager.getRepository(WorkspaceShared).save(sharedWorkspaces) + }) + + return { message: GeneralSuccessMessage.UPDATED } + } + + /** + * Updates all entities with null workspaceId to the specified workspaceId + * Used for migrating legacy data that was created before workspace implementation + * This function is guaranteed to return meaningful results with affected row counts + * @param queryRunner The TypeORM query runner to execute database operations + * @param workspaceId The target workspaceId to assign to records with null workspaceId + * @returns An array of update results, each containing the count of affected rows. 
+ * The array will always contain results for each entity type in the following order: + * [ApiKey, Assistant, ChatFlow, Credential, CustomTemplate, Dataset, DocumentStore, Evaluation, Evaluator, Execution, Tool, Variable] + */ + public async setNullWorkspaceId(queryRunner: QueryRunner, workspaceId: string): Promise<UpdateResult[]> { + return await Promise.all([ + queryRunner.manager.update(ApiKey, { workspaceId: IsNull() }, { workspaceId }), + queryRunner.manager.update(Assistant, { workspaceId: IsNull() }, { workspaceId }), + queryRunner.manager.update(ChatFlow, { workspaceId: IsNull() }, { workspaceId }), + queryRunner.manager.update(Credential, { workspaceId: IsNull() }, { workspaceId }), + queryRunner.manager.update(CustomTemplate, { workspaceId: IsNull() }, { workspaceId }), + queryRunner.manager.update(Dataset, { workspaceId: IsNull() }, { workspaceId }), + queryRunner.manager.update(DocumentStore, { workspaceId: IsNull() }, { workspaceId }), + queryRunner.manager.update(Evaluation, { workspaceId: IsNull() }, { workspaceId }), + queryRunner.manager.update(Evaluator, { workspaceId: IsNull() }, { workspaceId }), + queryRunner.manager.update(Execution, { workspaceId: IsNull() }, { workspaceId }), + queryRunner.manager.update(Tool, { workspaceId: IsNull() }, { workspaceId }), + queryRunner.manager.update(Variable, { workspaceId: IsNull() }, { workspaceId }) + ]) + } +} diff --git a/packages/server/src/enterprise/sso/Auth0SSO.ts b/packages/server/src/enterprise/sso/Auth0SSO.ts new file mode 100644 index 00000000000..d338feede52 --- /dev/null +++ b/packages/server/src/enterprise/sso/Auth0SSO.ts @@ -0,0 +1,153 @@ +// Auth0SSO.ts +import SSOBase from './SSOBase' +import passport from 'passport' +import { Profile, Strategy as Auth0Strategy } from 'passport-auth0' +import { Request } from 'express' +import auditService from '../services/audit' +import { ErrorMessage, LoggedInUser, LoginActivityCode } from '../Interface.Enterprise' +import { setTokenOrCookies } from '../middleware/passport' +import axios from 'axios' + +const PROVIDER_NAME_AUTH0_SSO = 'Auth0 SSO' + +class Auth0SSO extends SSOBase { + static LOGIN_URI = '/api/v1/auth0/login' + static CALLBACK_URI = '/api/v1/auth0/callback' + static LOGOUT_URI = '/api/v1/auth0/logout' + + getProviderName(): string { + return PROVIDER_NAME_AUTH0_SSO + } + + static getCallbackURL(): string { + const APP_URL = process.env.APP_URL || 'http://127.0.0.1:' + process.env.PORT + return APP_URL + Auth0SSO.CALLBACK_URI + } + + setSSOConfig(ssoConfig: any) { + super.setSSOConfig(ssoConfig) + if (ssoConfig) { + const { domain, clientID, clientSecret } = this.ssoConfig + + passport.use( + 'auth0', + new Auth0Strategy( + { + domain: domain || 'your_auth0_domain', + clientID: clientID || 'your_auth0_client_id', + clientSecret: clientSecret || 'your_auth0_client_secret', + callbackURL: Auth0SSO.getCallbackURL() || 'http://localhost:3000/auth/auth0/callback', + passReqToCallback: true + }, + async ( + req: Request, + accessToken: string, + refreshToken: string, + extraParams: any, + profile: Profile, + done: (error: any, user?: any) => void + ) => { + const email = profile.emails?.[0]?.value + if (!email) { + await auditService.recordLoginActivity( + '', + LoginActivityCode.UNKNOWN_USER, + ErrorMessage.UNKNOWN_USER, + PROVIDER_NAME_AUTH0_SSO + ) + return done({ name: 'SSO_LOGIN_FAILED', message: ErrorMessage.UNKNOWN_USER }, undefined) + } + return await this.verifyAndLogin(this.app, email, done, profile, accessToken, refreshToken) + } + ) + ) + } else { + passport.unuse('auth0') + } + } +
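    // Illustrative wiring of this provider (sketch only; assumes an Express app instance and an
    // SSO config object shaped like { domain, clientID, clientSecret } loaded from configuration):
    //
    //   import express from 'express'
    //   import Auth0SSO from './Auth0SSO'
    //
    //   const app = express()
    //   const auth0 = new Auth0SSO(app, { domain, clientID, clientSecret })
    //   auth0.initialize() // registers /api/v1/auth0/login and /api/v1/auth0/callback
    //
    //   // Passing a falsy config later unregisters the passport strategy:
    //   auth0.setSSOConfig(undefined)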
+ initialize() { + this.setSSOConfig(this.ssoConfig) + + this.app.get(Auth0SSO.LOGIN_URI, (req, res, next?) => { + if (!this.getSSOConfig()) { + return res.status(400).json({ error: 'Auth0 SSO is not configured.' }) + } + passport.authenticate('auth0', { + scope: 'openid profile email' // Request scopes for profile and email information + })(req, res, next) + }) + + this.app.get(Auth0SSO.CALLBACK_URI, (req, res, next?) => { + if (!this.getSSOConfig()) { + return res.status(400).json({ error: 'Auth0 SSO is not configured.' }) + } + passport.authenticate('auth0', async (err: any, user: LoggedInUser) => { + try { + if (err || !user) { + if (err?.name == 'SSO_LOGIN_FAILED') { + const error = { message: err.message } + const signinUrl = `/signin?error=${encodeURIComponent(JSON.stringify(error))}` + return res.redirect(signinUrl) + } + return next ? next(err) : res.status(401).json(err) + } + req.login(user, { session: true }, async (error) => { + if (error) return next ? next(error) : res.status(401).json(error) + return setTokenOrCookies(res, user, true, req, true, true) + }) + } catch (error) { + return next ? next(error) : res.status(401).json(error) + } + })(req, res, next) + }) + } + + static async testSetup(ssoConfig: any) { + const { domain, clientID, clientSecret } = ssoConfig + + try { + const tokenResponse = await axios.post( + `https://${domain}/oauth/token`, + { + client_id: clientID, + client_secret: clientSecret, + audience: `https://${domain}/api/v2/`, + grant_type: 'client_credentials' + }, + { + headers: { 'Content-Type': 'application/json' } + } + ) + return { message: tokenResponse.status } + } catch (error) { + const errorMessage = 'Auth0 Configuration test failed. Please check your credentials and domain.' + return { error: errorMessage } + } + } + + async refreshToken(ssoRefreshToken: string) { + const { domain, clientID, clientSecret } = this.ssoConfig + + try { + const response = await axios.post( + `https://${domain}/oauth/token`, + { + client_id: clientID, + client_secret: clientSecret, + grant_type: 'refresh_token', + refresh_token: ssoRefreshToken + }, + { + headers: { 'Content-Type': 'application/json' } + } + ) + return { ...response.data } + } catch (error) { + const errorMessage = 'Failed to get refreshToken from Auth0.' + return { error: errorMessage } + } + } +} + +export default Auth0SSO diff --git a/packages/server/src/enterprise/sso/AzureSSO.ts b/packages/server/src/enterprise/sso/AzureSSO.ts new file mode 100644 index 00000000000..4e5781f50f1 --- /dev/null +++ b/packages/server/src/enterprise/sso/AzureSSO.ts @@ -0,0 +1,157 @@ +// AzureSSO.ts +import SSOBase from './SSOBase' +import passport from 'passport' +import { Profile, Strategy as OpenIDConnectStrategy, VerifyCallback } from 'passport-openidconnect' +import { Request } from 'express' +import auditService from '../services/audit' +import { ErrorMessage, LoggedInUser, LoginActivityCode } from '../Interface.Enterprise' +import { setTokenOrCookies } from '../middleware/passport' +import axios from 'axios' + +class AzureSSO extends SSOBase { + static LOGIN_URI = '/api/v1/azure/login' + static CALLBACK_URI = '/api/v1/azure/callback' + static LOGOUT_URI = '/api/v1/azure/logout' + + getProviderName(): string { + return 'Microsoft SSO' + } + + static getCallbackURL(): string { + const APP_URL = process.env.APP_URL || 'http://127.0.0.1:' + process.env.PORT + return APP_URL + AzureSSO.CALLBACK_URI + } + + initialize() { + this.setSSOConfig(this.ssoConfig) + + this.app.get(AzureSSO.LOGIN_URI, (req, res, next?) 
=> { + if (!this.getSSOConfig()) { + return res.status(400).json({ error: 'Azure SSO is not configured.' }) + } + passport.authenticate('azure-ad', async () => { + if (next) next() + })(req, res, next) + }) + + this.app.get(AzureSSO.CALLBACK_URI, (req, res, next?) => { + if (!this.getSSOConfig()) { + return res.status(400).json({ error: 'Azure SSO is not configured.' }) + } + passport.authenticate('azure-ad', async (err: any, user: LoggedInUser) => { + try { + if (err || !user) { + if (err?.name == 'SSO_LOGIN_FAILED') { + const error = { message: err.message } + const signinUrl = `/signin?error=${encodeURIComponent(JSON.stringify(error))}` + return res.redirect(signinUrl) + } + return next ? next(err) : res.status(401).json(err) + } + req.login(user, { session: true }, async (error) => { + if (error) return next ? next(error) : res.status(401).json(error) + return setTokenOrCookies(res, user, true, req, true, true) + }) + } catch (error) { + return next ? next(error) : res.status(401).json(error) + } + })(req, res, next) + }) + } + + setSSOConfig(ssoConfig: any) { + super.setSSOConfig(ssoConfig) + if (this.ssoConfig) { + const { tenantID, clientID, clientSecret } = this.ssoConfig + passport.use( + 'azure-ad', + new OpenIDConnectStrategy( + { + issuer: `https://login.microsoftonline.com/${tenantID}/v2.0`, + authorizationURL: `https://login.microsoftonline.com/${tenantID}/oauth2/v2.0/authorize`, + tokenURL: `https://login.microsoftonline.com/${tenantID}/oauth2/v2.0/token`, + userInfoURL: `https://graph.microsoft.com/oidc/userinfo`, + clientID: clientID || 'your_client_id', + clientSecret: clientSecret || 'your_client_secret', + callbackURL: AzureSSO.getCallbackURL(), + scope: 'openid profile email offline_access', + passReqToCallback: true + }, + async ( + req: Request, + issuer: string, + profile: Profile, + context: object, + idToken: string | object, + accessToken: string | object, + refreshToken: string, + done: VerifyCallback + ) => { + const email = profile.username + if (!email) { + await auditService.recordLoginActivity( + '', + LoginActivityCode.UNKNOWN_USER, + ErrorMessage.UNKNOWN_USER, + this.getProviderName() + ) + return done({ name: 'SSO_LOGIN_FAILED', message: ErrorMessage.UNKNOWN_USER }, undefined) + } + return this.verifyAndLogin(this.app, email, done, profile, accessToken, refreshToken) + } + ) + ) + } else { + passport.unuse('azure-ad') + } + } + + static async testSetup(ssoConfig: any) { + const { tenantID, clientID, clientSecret } = ssoConfig + + try { + const tokenResponse = await axios.post( + `https://login.microsoftonline.com/${tenantID}/oauth2/v2.0/token`, + new URLSearchParams({ + client_id: clientID, + client_secret: clientSecret, + grant_type: 'client_credentials', + scope: 'https://graph.microsoft.com/.default' + }).toString(), + { + headers: { 'Content-Type': 'application/x-www-form-urlencoded' } + } + ) + return { message: tokenResponse.statusText } + } catch (error) { + const errorMessage = 'Microsoft Configuration test failed. Please check your credentials and Tenant ID.' 
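    // Any failure above (wrong tenant ID, bad client credentials, or a network error) is
    // collapsed into this generic message rather than surfacing the raw Azure AD response.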
+ return { error: errorMessage } + } + } + + async refreshToken(ssoRefreshToken: string) { + const { tenantID, clientID, clientSecret } = this.ssoConfig + + try { + const response = await axios.post( + `https://login.microsoftonline.com/${tenantID}/oauth2/v2.0/token`, + new URLSearchParams({ + client_id: clientID || '', + client_secret: clientSecret || '', + grant_type: 'refresh_token', + refresh_token: ssoRefreshToken, + scope: 'openid profile email' + }).toString(), + { + headers: { 'Content-Type': 'application/x-www-form-urlencoded' } + } + ) + return { ...response.data } + } catch (error) { + const errorMessage = 'Failed to get refreshToken from Azure.' + return { error: errorMessage } + } + } +} + +export default AzureSSO diff --git a/packages/server/src/enterprise/sso/GithubSSO.ts b/packages/server/src/enterprise/sso/GithubSSO.ts new file mode 100644 index 00000000000..11f5ad23429 --- /dev/null +++ b/packages/server/src/enterprise/sso/GithubSSO.ts @@ -0,0 +1,151 @@ +import SSOBase from './SSOBase' +import passport from 'passport' +import { LoggedInUser } from '../Interface.Enterprise' +import { setTokenOrCookies } from '../middleware/passport' +import { Strategy as GitHubStrategy, Profile } from 'passport-github' + +class GithubSSO extends SSOBase { + static LOGIN_URI = '/api/v1/github/login' + static CALLBACK_URI = '/api/v1/github/callback' + static LOGOUT_URI = '/api/v1/github/logout' + + getProviderName(): string { + return 'Github SSO' + } + + static getCallbackURL(): string { + const APP_URL = process.env.APP_URL || 'http://127.0.0.1:' + process.env.PORT + return APP_URL + GithubSSO.CALLBACK_URI + } + + setSSOConfig(ssoConfig: any) { + super.setSSOConfig(ssoConfig) + if (this.ssoConfig) { + const clientID = this.ssoConfig.clientID + const clientSecret = this.ssoConfig.clientSecret + + // Configure Passport to use the GitHub strategy + passport.use( + new GitHubStrategy( + { + clientID: clientID, + clientSecret: clientSecret, + callbackURL: GithubSSO.CALLBACK_URI, + scope: ['user:email'] + }, + async (accessToken: string, refreshToken: string, profile: Profile, done: any) => { + // Fetch emails from GitHub API using the access token. + const emailResponse = await fetch('https://api.github.com/user/emails', { + headers: { + Authorization: `token ${accessToken}`, + 'User-Agent': 'Node.js' + } + }) + const emails = await emailResponse.json() + // Look for a verified primary email. + let primaryEmail = emails.find((email: any) => email.primary && email.verified)?.email + if (!primaryEmail && Array.isArray(emails) && emails.length > 0) { + primaryEmail = emails[0].email + } + return this.verifyAndLogin(this.app, primaryEmail, done, profile, accessToken, refreshToken) + } + ) + ) + } else { + passport.unuse('github') + } + } + + initialize() { + if (this.ssoConfig) { + this.setSSOConfig(this.ssoConfig) + } + + this.app.get(GithubSSO.LOGIN_URI, (req, res, next?) => { + if (!this.getSSOConfig()) { + return res.status(400).json({ error: 'Github SSO is not configured.' }) + } + passport.authenticate('github', async () => { + if (next) next() + })(req, res, next) + }) + + this.app.get(GithubSSO.CALLBACK_URI, (req, res, next?) => { + passport.authenticate('github', async (err: any, user: LoggedInUser) => { + try { + if (err || !user) { + if (err?.name == 'SSO_LOGIN_FAILED') { + const error = { message: err.message } + const signinUrl = `/signin?error=${encodeURIComponent(JSON.stringify(error))}` + return res.redirect(signinUrl) + } + return next ? 
next(err) : res.status(401).json(err) + } + req.login(user, { session: true }, async (error) => { + if (error) return next ? next(error) : res.status(401).json(error) + return setTokenOrCookies(res, user, true, req, true, true) + }) + } catch (error) { + return next ? next(error) : res.status(401).json(error) + } + })(req, res, next) + }) + } + + static async testSetup(ssoConfig: any) { + const { clientID, clientSecret } = ssoConfig + + try { + const response = await fetch('https://github.com/login/oauth/access_token', { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json' + }, + body: JSON.stringify({ + client_id: clientID, + client_secret: clientSecret, + code: 'dummy_code_for_testing' + }) + }) + const data = await response.json() + if (data.error === 'bad_verification_code') { + return { message: 'ClientID and clientSecret are valid.' } + } else { + return { error: `Invalid credentials. Received error: ${data.error || 'unknown'}` } + } + } catch (error) { + return { error: 'Github Configuration test failed. Please check your credentials.' } + } + } + + async refreshToken(currentRefreshToken: string) { + const { clientID, clientSecret } = this.ssoConfig + + try { + const response = await fetch('https://github.com/login/oauth/access_token', { + method: 'POST', + headers: { + Accept: 'application/json', + 'Content-Type': 'application/json' + }, + body: JSON.stringify({ + client_id: clientID, + client_secret: clientSecret, + grant_type: 'refresh_token', + refresh_token: currentRefreshToken + }) + }) + const data = await response.json() + if (data.error || !data.access_token) { + return { error: 'Failed to get refreshToken from Github.' } + } else { + return data + } + } catch (error) { + return { error: 'Failed to get refreshToken from Github.' 
} + } + } +} + +export default GithubSSO diff --git a/packages/server/src/enterprise/sso/GoogleSSO.ts b/packages/server/src/enterprise/sso/GoogleSSO.ts new file mode 100644 index 00000000000..8dd97cf098a --- /dev/null +++ b/packages/server/src/enterprise/sso/GoogleSSO.ts @@ -0,0 +1,155 @@ +// GoogleSSO.ts +import SSOBase from './SSOBase' +import passport from 'passport' +import { Profile, Strategy as OpenIDConnectStrategy, VerifyCallback } from 'passport-openidconnect' +import auditService from '../services/audit' +import { ErrorMessage, LoggedInUser, LoginActivityCode } from '../Interface.Enterprise' +import { setTokenOrCookies } from '../middleware/passport' +import axios from 'axios' + +class GoogleSSO extends SSOBase { + static LOGIN_URI = '/api/v1/google/login' + static CALLBACK_URI = '/api/v1/google/callback' + static LOGOUT_URI = '/api/v1/google/logout' + + getProviderName(): string { + return 'Google SSO' + } + + static getCallbackURL(): string { + const APP_URL = process.env.APP_URL || 'http://127.0.0.1:' + process.env.PORT + return APP_URL + GoogleSSO.CALLBACK_URI + } + + setSSOConfig(ssoConfig: any) { + super.setSSOConfig(ssoConfig) + if (this.ssoConfig) { + const clientID = this.ssoConfig.clientID + const clientSecret = this.ssoConfig.clientSecret + + passport.use( + 'google', + new OpenIDConnectStrategy( + { + issuer: 'https://accounts.google.com', + authorizationURL: 'https://accounts.google.com/o/oauth2/v2/auth', + tokenURL: 'https://oauth2.googleapis.com/token', + userInfoURL: 'https://openidconnect.googleapis.com/v1/userinfo', + clientID: clientID || 'your_google_client_id', + clientSecret: clientSecret || 'your_google_client_secret', + callbackURL: GoogleSSO.getCallbackURL() || 'http://localhost:3000/auth/google/callback', + scope: 'openid profile email' + }, + async ( + issuer: string, + profile: Profile, + context: object, + idToken: string | object, + accessToken: string | object, + refreshToken: string, + done: VerifyCallback + ) => { + if (profile.emails && profile.emails.length > 0) { + const email = profile.emails[0].value + return this.verifyAndLogin(this.app, email, done, profile, accessToken, refreshToken) + } else { + await auditService.recordLoginActivity( + '', + LoginActivityCode.UNKNOWN_USER, + ErrorMessage.UNKNOWN_USER, + this.getProviderName() + ) + return done({ name: 'SSO_LOGIN_FAILED', message: ErrorMessage.UNKNOWN_USER }, undefined) + } + } + ) + ) + } else { + passport.unuse('google') + } + } + + initialize() { + if (this.ssoConfig) { + this.setSSOConfig(this.ssoConfig) + } + + this.app.get(GoogleSSO.LOGIN_URI, (req, res, next?) => { + if (!this.getSSOConfig()) { + return res.status(400).json({ error: 'Google SSO is not configured.' }) + } + passport.authenticate('google', async () => { + if (next) next() + })(req, res, next) + }) + + this.app.get(GoogleSSO.CALLBACK_URI, (req, res, next?) => { + if (!this.getSSOConfig()) { + return res.status(400).json({ error: 'Google SSO is not configured.' }) + } + passport.authenticate('google', async (err: any, user: LoggedInUser) => { + try { + if (err || !user) { + if (err?.name == 'SSO_LOGIN_FAILED') { + const error = { message: err.message } + const signinUrl = `/signin?error=${encodeURIComponent(JSON.stringify(error))}` + return res.redirect(signinUrl) + } + return next ? next(err) : res.status(401).json(err) + } + req.login(user, { session: true }, async (error) => { + if (error) return next ? 
next(error) : res.status(401).json(error) + return setTokenOrCookies(res, user, true, req, true, true) + }) + } catch (error) { + return next ? next(error) : res.status(401).json(error) + } + })(req, res, next) + }) + } + + static async testSetup(ssoConfig: any) { + const { clientID, redirectURL } = ssoConfig + + try { + const authorizationUrl = `https://accounts.google.com/o/oauth2/v2/auth?${new URLSearchParams({ + client_id: clientID, + redirect_uri: redirectURL, + response_type: 'code', + scope: 'openid email profile' + }).toString()}` + + const tokenResponse = await axios.get(authorizationUrl) + return { message: tokenResponse.statusText } + } catch (error) { + const errorMessage = 'Google Configuration test failed. Please check your credentials.' + return { error: errorMessage } + } + } + + async refreshToken(ssoRefreshToken: string) { + const { clientID, clientSecret } = this.ssoConfig + + try { + const response = await axios.post( + `https://oauth2.googleapis.com/token`, + new URLSearchParams({ + client_id: clientID || '', + client_secret: clientSecret || '', + grant_type: 'refresh_token', + refresh_token: ssoRefreshToken, + scope: 'refresh_token' + }).toString(), + { + headers: { 'Content-Type': 'application/x-www-form-urlencoded' } + } + ) + return { ...response.data } + } catch (error) { + const errorMessage = 'Failed to get refreshToken from Google.' + return { error: errorMessage } + } + } +} + +export default GoogleSSO diff --git a/packages/server/src/enterprise/sso/SSOBase.ts b/packages/server/src/enterprise/sso/SSOBase.ts new file mode 100644 index 00000000000..f990de5a85f --- /dev/null +++ b/packages/server/src/enterprise/sso/SSOBase.ts @@ -0,0 +1,154 @@ +// SSOBase.ts +import express from 'express' +import passport from 'passport' +import { IAssignedWorkspace, LoggedInUser } from '../Interface.Enterprise' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { UserErrorMessage, UserService } from '../services/user.service' +import { WorkspaceUserService } from '../services/workspace-user.service' +import { AccountService } from '../services/account.service' +import { WorkspaceUser } from '../database/entities/workspace-user.entity' +import { OrganizationService } from '../services/organization.service' +import { GeneralRole } from '../database/entities/role.entity' +import { RoleErrorMessage, RoleService } from '../services/role.service' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { StatusCodes } from 'http-status-codes' +import { Platform } from '../../Interface' +import { UserStatus } from '../database/entities/user.entity' + +abstract class SSOBase { + protected app: express.Application + protected ssoConfig: any + + constructor(app: express.Application, ssoConfig?: any) { + this.app = app + this.ssoConfig = ssoConfig + } + + setSSOConfig(ssoConfig: any) { + this.ssoConfig = ssoConfig + } + + getSSOConfig() { + return this.ssoConfig + } + + abstract getProviderName(): string + abstract initialize(): void + abstract refreshToken(ssoRefreshToken: string): Promise<{ [key: string]: any }> + async verifyAndLogin( + app: express.Application, + email: string, + done: (err?: Error | null, user?: Express.User, info?: any) => void, + profile: passport.Profile, + accessToken: string | object, + refreshToken: string + ) { + let queryRunner + const ssoProviderName = this.getProviderName() + try { + queryRunner = getRunningExpressApp().AppDataSource.createQueryRunner() + await queryRunner.connect() + + const userService 
= new UserService() + const organizationService = new OrganizationService() + const workspaceUserService = new WorkspaceUserService() + + let user: any = await userService.readUserByEmail(email, queryRunner) + let wu: any = {} + + if (!user) { + // In ENTERPRISE mode, we don't want to create a new user if the user is not found + if (getRunningExpressApp().identityManager.getPlatformType() === Platform.ENTERPRISE) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, UserErrorMessage.USER_NOT_FOUND) + } + // no user found, register the user + const data: any = { + user: { + email: email, + name: profile.displayName || email, + status: UserStatus.ACTIVE, + credential: undefined + } + } + if (getRunningExpressApp().identityManager.getPlatformType() === Platform.CLOUD) { + const accountService = new AccountService() + const newAccount = await accountService.register(data) + wu = newAccount.workspaceUser + wu.workspace = newAccount.workspace + user = newAccount.user + } + } else { + if (user.status === UserStatus.INVITED) { + const data: any = { + user: { + ...user, + email, + name: profile.displayName || '', + status: UserStatus.ACTIVE, + credential: undefined + } + } + const accountService = new AccountService() + const newAccount = await accountService.register(data) + user = newAccount.user + } + let wsUserOrUsers = await workspaceUserService.readWorkspaceUserByLastLogin(user?.id, queryRunner) + wu = Array.isArray(wsUserOrUsers) && wsUserOrUsers.length > 0 ? wsUserOrUsers[0] : (wsUserOrUsers as WorkspaceUser) + } + + const workspaceUser = wu as WorkspaceUser + let roleService = new RoleService() + const ownerRole = await roleService.readGeneralRoleByName(GeneralRole.OWNER, queryRunner) + const role = await roleService.readRoleById(workspaceUser.roleId, queryRunner) + if (!role) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, RoleErrorMessage.ROLE_NOT_FOUND) + + const workspaceUsers = await workspaceUserService.readWorkspaceUserByUserId(workspaceUser.userId, queryRunner) + const assignedWorkspaces: IAssignedWorkspace[] = workspaceUsers.map((workspaceUser) => { + return { + id: workspaceUser.workspace.id, + name: workspaceUser.workspace.name, + role: workspaceUser.role?.name, + organizationId: workspaceUser.workspace.organizationId + } as IAssignedWorkspace + }) + + const organization = await organizationService.readOrganizationById(workspaceUser.workspace.organizationId, queryRunner) + if (!organization) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, 'Organization not found') + const subscriptionId = organization.subscriptionId as string + const customerId = organization.customerId as string + const features = await getRunningExpressApp().identityManager.getFeaturesByPlan(subscriptionId) + const productId = await getRunningExpressApp().identityManager.getProductIdFromSubscription(subscriptionId) + + const loggedInUser: LoggedInUser = { + id: workspaceUser.userId, + email: user?.email || '', + name: user?.name || '', + roleId: workspaceUser.roleId, + activeOrganizationId: organization.id, + activeOrganizationSubscriptionId: subscriptionId, + activeOrganizationCustomerId: customerId, + activeOrganizationProductId: productId, + isOrganizationAdmin: workspaceUser.roleId === ownerRole?.id, + activeWorkspaceId: workspaceUser.workspaceId, + activeWorkspace: workspaceUser.workspace.name, + assignedWorkspaces, + isApiKeyValidated: true, + ssoToken: accessToken as string, + ssoRefreshToken: refreshToken, + ssoProvider: ssoProviderName, + permissions: [...JSON.parse(role.permissions)], 
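    // permissions are parsed from the resolved workspace role; the features assigned below are
    // derived from the organization's subscription plan via the identity manager.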
+ features + } + return done(null, loggedInUser as Express.User, { message: 'Logged in Successfully' }) + } catch (error) { + return done( + { name: 'SSO_LOGIN_FAILED', message: ssoProviderName + ' Login failed! Please contact your administrator.' }, + undefined + ) + } finally { + if (queryRunner && !queryRunner.isReleased) await queryRunner.release() + } + } +} + +export default SSOBase diff --git a/packages/server/src/enterprise/utils/ControllerServiceUtils.ts b/packages/server/src/enterprise/utils/ControllerServiceUtils.ts new file mode 100644 index 00000000000..245f1e4711e --- /dev/null +++ b/packages/server/src/enterprise/utils/ControllerServiceUtils.ts @@ -0,0 +1,11 @@ +import { Equal } from 'typeorm' +import { Request } from 'express' + +export const getWorkspaceSearchOptions = (workspaceId?: string) => { + return workspaceId ? { workspaceId: Equal(workspaceId) } : {} +} + +export const getWorkspaceSearchOptionsFromReq = (req: Request) => { + const workspaceId = req.user?.activeWorkspaceId + return workspaceId ? { workspaceId: Equal(workspaceId) } : {} +} diff --git a/packages/server/src/enterprise/utils/encryption.util.ts b/packages/server/src/enterprise/utils/encryption.util.ts new file mode 100644 index 00000000000..60c2acc7eca --- /dev/null +++ b/packages/server/src/enterprise/utils/encryption.util.ts @@ -0,0 +1,22 @@ +import bcrypt from 'bcryptjs' +import { AES, enc } from 'crypto-js' +import { getEncryptionKey } from '../../utils' + +export function getHash(value: string) { + const salt = bcrypt.genSaltSync(parseInt(process.env.PASSWORD_SALT_HASH_ROUNDS || '5')) + return bcrypt.hashSync(value, salt) +} + +export function compareHash(value1: string, value2: string) { + return bcrypt.compareSync(value1, value2) +} + +export async function encrypt(value: string) { + const encryptionKey = await getEncryptionKey() + return AES.encrypt(value, encryptionKey).toString() +} + +export async function decrypt(value: string) { + const encryptionKey = await getEncryptionKey() + return AES.decrypt(value, encryptionKey).toString(enc.Utf8) +} diff --git a/packages/server/src/enterprise/utils/sendEmail.ts b/packages/server/src/enterprise/utils/sendEmail.ts new file mode 100644 index 00000000000..2292dd25ff9 --- /dev/null +++ b/packages/server/src/enterprise/utils/sendEmail.ts @@ -0,0 +1,120 @@ +import * as handlebars from 'handlebars' +import nodemailer from 'nodemailer' +import fs from 'node:fs' +import path from 'path' +import { Platform } from '../../Interface' + +const SMTP_HOST = process.env.SMTP_HOST +const SMTP_PORT = parseInt(process.env.SMTP_PORT as string, 10) +const SMTP_USER = process.env.SMTP_USER +const SMTP_PASSWORD = process.env.SMTP_PASSWORD +const SENDER_EMAIL = process.env.SENDER_EMAIL +const SMTP_SECURE = process.env.SMTP_SECURE ? process.env.SMTP_SECURE === 'true' : true +const TLS = process.env.ALLOW_UNAUTHORIZED_CERTS ? { rejectUnauthorized: false } : undefined + +const transporter = nodemailer.createTransport({ + host: SMTP_HOST, + port: SMTP_PORT, + secure: SMTP_SECURE ?? 
true, + auth: { + user: SMTP_USER, + pass: SMTP_PASSWORD + }, + tls: TLS +}) + +const getEmailTemplate = (defaultTemplateName: string, userTemplatePath?: string) => { + try { + if (userTemplatePath) { + return fs.readFileSync(userTemplatePath, 'utf8') + } + } catch (error) { + console.warn(`Failed to load custom template from ${userTemplatePath}, falling back to default`) + } + return fs.readFileSync(path.join(__dirname, '../', 'emails', defaultTemplateName), 'utf8') +} + +const sendWorkspaceAdd = async (email: string, workspaceName: string, dashboardLink: string) => { + let htmlToSend + let textContent + + const template = getEmailTemplate('workspace_add_cloud.hbs', process.env.WORKSPACE_INVITE_TEMPLATE_PATH) + const compiledWorkspaceInviteTemplateSource = handlebars.compile(template) + htmlToSend = compiledWorkspaceInviteTemplateSource({ workspaceName, dashboardLink }) + textContent = `You have been added to ${workspaceName}. Click here to visit your dashboard: ${dashboardLink}` // plain text body + + await transporter.sendMail({ + from: SENDER_EMAIL || '"FlowiseAI Team" ', // sender address + to: email, + subject: `You have been added to ${workspaceName}`, // Subject line + text: textContent, // plain text body + html: htmlToSend // html body + }) +} + +const sendWorkspaceInvite = async ( + email: string, + workspaceName: string, + registerLink: string, + platform: Platform = Platform.ENTERPRISE, + inviteType: 'new' | 'update' = 'new' +) => { + let htmlToSend + let textContent + + const template = + platform === Platform.ENTERPRISE + ? getEmailTemplate( + inviteType === 'new' ? 'workspace_new_invite_enterprise.hbs' : 'workspace_update_invite_enterprise.hbs', + process.env.WORKSPACE_INVITE_TEMPLATE_PATH + ) + : getEmailTemplate( + inviteType === 'new' ? 'workspace_new_invite_cloud.hbs' : 'workspace_update_invite_cloud.hbs', + process.env.WORKSPACE_INVITE_TEMPLATE_PATH + ) + const compiledWorkspaceInviteTemplateSource = handlebars.compile(template) + htmlToSend = compiledWorkspaceInviteTemplateSource({ workspaceName, registerLink }) + textContent = `You have been invited to ${workspaceName}. Click here to register: ${registerLink}` // plain text body + + await transporter.sendMail({ + from: SENDER_EMAIL || '"FlowiseAI Team" ', // sender address + to: email, + subject: `You have been invited to ${workspaceName}`, // Subject line + text: textContent, // plain text body + html: htmlToSend // html body + }) +} + +const sendPasswordResetEmail = async (email: string, resetLink: string) => { + const passwordResetTemplateSource = fs.readFileSync(path.join(__dirname, '../', 'emails', 'workspace_user_reset_password.hbs'), 'utf8') + const compiledPasswordResetTemplateSource = handlebars.compile(passwordResetTemplateSource) + + const htmlToSend = compiledPasswordResetTemplateSource({ resetLink }) + await transporter.sendMail({ + from: SENDER_EMAIL || '"FlowiseAI Team" ', // sender address + to: email, + subject: 'Reset your password', // Subject line + text: `You requested a link to reset your password. 
Click here to reset the password: ${resetLink}`, // plain text body + html: htmlToSend // html body + }) +} + +const sendVerificationEmailForCloud = async (email: string, verificationLink: string) => { + let htmlToSend + let textContent + + const template = getEmailTemplate('verify_email_cloud.hbs') + const compiledWorkspaceInviteTemplateSource = handlebars.compile(template) + htmlToSend = compiledWorkspaceInviteTemplateSource({ verificationLink }) + textContent = `To complete your registration, we need to verify your email address. Click here to verify your email address: ${verificationLink}` // plain text body + + await transporter.sendMail({ + from: SENDER_EMAIL || '"FlowiseAI Team" ', // sender address + to: email, + subject: 'Action Required: Please verify your email', // Subject line + text: textContent, // plain text body + html: htmlToSend // html body + }) +} + +export { sendWorkspaceAdd, sendWorkspaceInvite, sendPasswordResetEmail, sendVerificationEmailForCloud } diff --git a/packages/server/src/enterprise/utils/tempTokenUtils.ts b/packages/server/src/enterprise/utils/tempTokenUtils.ts new file mode 100644 index 00000000000..6e6ba8b7645 --- /dev/null +++ b/packages/server/src/enterprise/utils/tempTokenUtils.ts @@ -0,0 +1,102 @@ +import { LoggedInUser } from '../Interface.Enterprise' +import * as crypto from 'crypto' +import moment from 'moment' +import { customAlphabet } from 'nanoid' + +const nanoid = customAlphabet('0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz', 64) + +// Generate a copy of the users without their passwords. +export const generateSafeCopy = (user: Partial, deleteEmail?: boolean): any => { + let _user: any = { ...user } + delete _user.credential + delete _user.tempToken + delete _user.tokenExpiry + if (deleteEmail) { + delete _user.email + } + delete _user.workspaceIds + delete _user.ssoToken + delete _user.ssoRefreshToken + return _user +} + +export const generateTempToken = () => { + // generate a token with nanoid and return it + const token = nanoid() + return token +} + +// Encrypt token with password using crypto.Cipheriv +export const encryptToken = (stringToEncrypt: string) => { + const key = crypto + .createHash('sha256') + .update(process.env.TOKEN_HASH_SECRET || 'Secre$t') + .digest() + + const IV_LENGTH = 16 + const iv = crypto.randomBytes(IV_LENGTH) + const cipher = crypto.createCipheriv('aes-256-cbc', key, iv) + const encrypted = cipher.update(stringToEncrypt) + + const result = Buffer.concat([encrypted, cipher.final()]) + + // formatted string [iv]:[token] + return iv.toString('hex') + ':' + result.toString('hex') +} + +// Decrypt token using the inverse of encryption crypto algorithm +export const decryptToken = (stringToDecrypt: string): string | undefined => { + try { + const key = crypto + .createHash('sha256') + .update(process.env.TOKEN_HASH_SECRET || 'Secre$t') + .digest() + + let textParts = stringToDecrypt.split(':') + let iv = Buffer.from(textParts.shift() as string, 'hex') + let encryptedText = Buffer.from(textParts.join(':'), 'hex') + let decipher = crypto.createDecipheriv('aes-256-cbc', key, iv) + let decrypted = decipher.update(encryptedText) + + const result = Buffer.concat([decrypted, decipher.final()]) + + return result.toString() + } catch (error) { + return undefined + } +} + +// Extract userUUID from decrypted token string +export const getUserUUIDFromToken = (token: string): string | undefined => { + try { + const userUUIDHash = token.split('-')[2] + return Buffer.from(userUUIDHash, 
'base64').toString('ascii') + } catch (error) { + return undefined + } +} + +export const isTokenValid = (tokenExpiry: Date, tokenType: TokenType): boolean => { + // Using moment.diff method for retrieve dates difference in hours + const tokenTimestampDate = moment(tokenExpiry) + const now = moment() + + if (tokenType === TokenType.INVITE) { + const expiryInHours = process.env.INVITE_TOKEN_EXPIRY_IN_HOURS ? parseInt(process.env.INVITE_TOKEN_EXPIRY_IN_HOURS) : 24 + // Fail if more than 24 hours + const diff = now.diff(tokenTimestampDate, 'hours') + if (Math.abs(diff) > expiryInHours) return false + } else if (tokenType === TokenType.PASSWORD_RESET) { + const expiryInMins = process.env.PASSWORD_RESET_TOKEN_EXPIRY_IN_MINUTES + ? parseInt(process.env.PASSWORD_RESET_TOKEN_EXPIRY_IN_MINUTES) + : 15 + const diff = now.diff(tokenTimestampDate, 'minutes') + if (Math.abs(diff) > expiryInMins) return false + } + return true +} + +export enum TokenType { + INVITE = 'INVITE', + PASSWORD_RESET = 'PASSWORD_RESET' +} diff --git a/packages/server/src/enterprise/utils/validation.util.ts b/packages/server/src/enterprise/utils/validation.util.ts new file mode 100644 index 00000000000..c22abb1b51c --- /dev/null +++ b/packages/server/src/enterprise/utils/validation.util.ts @@ -0,0 +1,23 @@ +export function isInvalidUUID(id: unknown): boolean { + const regexUUID = /^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i + return !id || typeof id !== 'string' || !regexUUID.test(id) +} + +export function isInvalidEmail(email: unknown): boolean { + const regexEmail = /^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$/ + return !email || typeof email !== 'string' || email.length > 255 || !regexEmail.test(email) +} + +export function isInvalidName(name: unknown): boolean { + return !name || typeof name !== 'string' || name.length > 100 +} + +export function isInvalidDateTime(dateTime: unknown): boolean { + const regexDateTime = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d+)?(Z|[+-]\d{2}:\d{2})?$/ + return !dateTime || typeof dateTime !== 'string' || !regexDateTime.test(dateTime) +} + +export function isInvalidPassword(password: unknown): boolean { + const regexPassword = /^(?=.*[a-z])(?=.*[A-Z])(?=.*\d)(?=.*[@$!%*?&-])[A-Za-z\d@$!%*?&-]{8,}$/ + return !password || typeof password !== 'string' || !regexPassword.test(password) +} diff --git a/packages/server/src/index.ts b/packages/server/src/index.ts index fb7618b5f4d..faebc787812 100644 --- a/packages/server/src/index.ts +++ b/packages/server/src/index.ts @@ -1,11 +1,10 @@ -import express from 'express' -import { Request, Response } from 'express' +import express, { Request, Response } from 'express' import path from 'path' import cors from 'cors' import http from 'http' -import basicAuth from 'express-basic-auth' -import { DataSource } from 'typeorm' -import { MODE } from './Interface' +import cookieParser from 'cookie-parser' +import { DataSource, IsNull } from 'typeorm' +import { MODE, Platform } from './Interface' import { getNodeModulesPackagePath, getEncryptionKey } from './utils' import logger, { expressRequestLogger } from './utils/logger' import { getDataSource } from './DataSource' @@ -14,23 +13,34 @@ import { ChatFlow } from './database/entities/ChatFlow' import { CachePool } from './CachePool' import { AbortControllerPool } from './AbortControllerPool' import { RateLimiterManager } from './utils/rateLimit' -import { getAPIKeys } from './utils/apiKey' -import { sanitizeMiddleware, getCorsOptions, getAllowedIframeOrigins } from 
'./utils/XSS' +import { getAllowedIframeOrigins, getCorsOptions, sanitizeMiddleware } from './utils/XSS' import { Telemetry } from './utils/telemetry' import flowiseApiV1Router from './routes' import errorHandlerMiddleware from './middlewares/errors' +import { WHITELIST_URLS } from './utils/constants' +import { initializeJwtCookieMiddleware, verifyToken } from './enterprise/middleware/passport' +import { IdentityManager } from './IdentityManager' import { SSEStreamer } from './utils/SSEStreamer' -import { validateAPIKey } from './utils/validateKey' +import { getAPIKeyWorkspaceID, validateAPIKey } from './utils/validateKey' +import { LoggedInUser } from './enterprise/Interface.Enterprise' import { IMetricsProvider } from './Interface.Metrics' import { Prometheus } from './metrics/Prometheus' import { OpenTelemetry } from './metrics/OpenTelemetry' import { QueueManager } from './queue/QueueManager' import { RedisEventSubscriber } from './queue/RedisEventSubscriber' -import { WHITELIST_URLS } from './utils/constants' import 'global-agent/bootstrap' +import { UsageCacheManager } from './UsageCacheManager' +import { Workspace } from './enterprise/database/entities/workspace.entity' +import { Organization } from './enterprise/database/entities/organization.entity' +import { GeneralRole, Role } from './enterprise/database/entities/role.entity' +import { migrateApiKeysFromJsonToDb } from './utils/apiKey' declare global { namespace Express { + interface User extends LoggedInUser {} + interface Request { + user?: LoggedInUser + } namespace Multer { interface File { bucket: string @@ -57,9 +67,11 @@ export class App { rateLimiterManager: RateLimiterManager AppDataSource: DataSource = getDataSource() sseStreamer: SSEStreamer + identityManager: IdentityManager metricsProvider: IMetricsProvider queueManager: QueueManager redisSubscriber: RedisEventSubscriber + usageCacheManager: UsageCacheManager constructor() { this.app = express() @@ -74,6 +86,9 @@ export class App { // Run Migrations Scripts await this.AppDataSource.runMigrations({ transaction: 'each' }) + // Initialize Identity Manager + this.identityManager = await IdentityManager.getInstance() + // Initialize nodes pool this.nodesPool = new NodesPool() await this.nodesPool.initialize() @@ -81,9 +96,6 @@ export class App { // Initialize abort controllers pool this.abortControllerPool = new AbortControllerPool() - // Initialize API keys - await getAPIKeys() - // Initialize encryption key await getEncryptionKey() @@ -94,6 +106,9 @@ export class App { // Initialize cache pool this.cachePool = new CachePool() + // Initialize usage cache manager + this.usageCacheManager = await UsageCacheManager.getInstance() + // Initialize telemetry this.telemetry = new Telemetry() @@ -108,12 +123,17 @@ export class App { telemetry: this.telemetry, cachePool: this.cachePool, appDataSource: this.AppDataSource, - abortControllerPool: this.abortControllerPool + abortControllerPool: this.abortControllerPool, + usageCacheManager: this.usageCacheManager }) + logger.info('✅ [Queue]: All queues setup successfully') this.redisSubscriber = new RedisEventSubscriber(this.sseStreamer) await this.redisSubscriber.connect() } + // TODO: Remove this by end of 2025 + await migrateApiKeysFromJsonToDb(this.AppDataSource, this.identityManager.getPlatformType()) + logger.info('📦 [server]: Data Source has been initialized!') } catch (error) { logger.error('❌ [server]: Error during Data Source initialization:', error) @@ -125,12 +145,16 @@ export class App { const flowise_file_size_limit = 
process.env.FLOWISE_FILE_SIZE_LIMIT || '50mb' this.app.use(express.json({ limit: flowise_file_size_limit })) this.app.use(express.urlencoded({ limit: flowise_file_size_limit, extended: true })) - if (process.env.NUMBER_OF_PROXIES && parseInt(process.env.NUMBER_OF_PROXIES) > 0) - this.app.set('trust proxy', parseInt(process.env.NUMBER_OF_PROXIES)) + + // Enhanced trust proxy settings for load balancer + this.app.set('trust proxy', true) // Trust all proxies // Allow access from specified domains this.app.use(cors(getCorsOptions())) + // Parse cookies + this.app.use(cookieParser()) + // Allow embedding from specified domains. this.app.use((req, res, next) => { const allowedOrigins = getAllowedIframeOrigins() @@ -152,70 +176,99 @@ export class App { // Add the sanitizeMiddleware to guard against XSS this.app.use(sanitizeMiddleware) + this.app.use((req, res, next) => { + res.header('Access-Control-Allow-Credentials', 'true') // Allow credentials (cookies, etc.) + if (next) next() + }) + const whitelistURLs = WHITELIST_URLS const URL_CASE_INSENSITIVE_REGEX: RegExp = /\/api\/v1\//i const URL_CASE_SENSITIVE_REGEX: RegExp = /\/api\/v1\// - if (process.env.FLOWISE_USERNAME && process.env.FLOWISE_PASSWORD) { - const username = process.env.FLOWISE_USERNAME - const password = process.env.FLOWISE_PASSWORD - const basicAuthMiddleware = basicAuth({ - users: { [username]: password } - }) - this.app.use(async (req, res, next) => { - // Step 1: Check if the req path contains /api/v1 regardless of case - if (URL_CASE_INSENSITIVE_REGEX.test(req.path)) { - // Step 2: Check if the req path is case sensitive - if (URL_CASE_SENSITIVE_REGEX.test(req.path)) { - // Step 3: Check if the req path is in the whitelist - const isWhitelisted = whitelistURLs.some((url) => req.path.startsWith(url)) - if (isWhitelisted) { - next() - } else if (req.headers['x-request-from'] === 'internal') { - basicAuthMiddleware(req, res, next) - } else { - const isKeyValidated = await validateAPIKey(req) - if (!isKeyValidated) { + await initializeJwtCookieMiddleware(this.app, this.identityManager) + + this.app.use(async (req, res, next) => { + // Step 1: Check if the req path contains /api/v1 regardless of case + if (URL_CASE_INSENSITIVE_REGEX.test(req.path)) { + // Step 2: Check if the req path is case sensitive + if (URL_CASE_SENSITIVE_REGEX.test(req.path)) { + // Step 3: Check if the req path is in the whitelist + const isWhitelisted = whitelistURLs.some((url) => req.path.startsWith(url)) + if (isWhitelisted) { + next() + } else if (req.headers['x-request-from'] === 'internal') { + verifyToken(req, res, next) + } else { + // Only check license validity for non-open-source platforms + if (this.identityManager.getPlatformType() !== Platform.OPEN_SOURCE) { + if (!this.identityManager.isLicenseValid()) { return res.status(401).json({ error: 'Unauthorized Access' }) } - next() } - } else { - return res.status(401).json({ error: 'Unauthorized Access' }) - } - } else { - // If the req path does not contain /api/v1, then allow the request to pass through, example: /assets, /canvas - next() - } - }) - } else { - this.app.use(async (req, res, next) => { - // Step 1: Check if the req path contains /api/v1 regardless of case - if (URL_CASE_INSENSITIVE_REGEX.test(req.path)) { - // Step 2: Check if the req path is case sensitive - if (URL_CASE_SENSITIVE_REGEX.test(req.path)) { - // Step 3: Check if the req path is in the whitelist - const isWhitelisted = whitelistURLs.some((url) => req.path.startsWith(url)) - if (isWhitelisted) { - next() - } else if
(req.headers['x-request-from'] === 'internal') { - next() - } else { - const isKeyValidated = await validateAPIKey(req) - if (!isKeyValidated) { + const isKeyValidated = await validateAPIKey(req) + if (!isKeyValidated) { + return res.status(401).json({ error: 'Unauthorized Access' }) + } + const apiKeyWorkSpaceId = await getAPIKeyWorkspaceID(req) + if (apiKeyWorkSpaceId) { + // Find workspace + const workspace = await this.AppDataSource.getRepository(Workspace).findOne({ + where: { id: apiKeyWorkSpaceId } + }) + if (!workspace) { + return res.status(401).json({ error: 'Unauthorized Access' }) + } + + // Find owner role + const ownerRole = await this.AppDataSource.getRepository(Role).findOne({ + where: { name: GeneralRole.OWNER, organizationId: IsNull() } + }) + if (!ownerRole) { + return res.status(401).json({ error: 'Unauthorized Access' }) + } + + // Find organization + const activeOrganizationId = workspace.organizationId as string + const org = await this.AppDataSource.getRepository(Organization).findOne({ + where: { id: activeOrganizationId } + }) + if (!org) { return res.status(401).json({ error: 'Unauthorized Access' }) } + const subscriptionId = org.subscriptionId as string + const customerId = org.customerId as string + const features = await this.identityManager.getFeaturesByPlan(subscriptionId) + const productId = await this.identityManager.getProductIdFromSubscription(subscriptionId) + + // @ts-ignore + req.user = { + permissions: [...JSON.parse(ownerRole.permissions)], + features, + activeOrganizationId: activeOrganizationId, + activeOrganizationSubscriptionId: subscriptionId, + activeOrganizationCustomerId: customerId, + activeOrganizationProductId: productId, + isOrganizationAdmin: true, + activeWorkspaceId: apiKeyWorkSpaceId, + activeWorkspace: workspace.name, + isApiKeyValidated: true + } next() + } else { + return res.status(401).json({ error: 'Unauthorized Access' }) } - } else { - return res.status(401).json({ error: 'Unauthorized Access' }) } } else { - // If the req path does not contain /api/v1, then allow the request to pass through, example: /assets, /canvas - next() + return res.status(401).json({ error: 'Unauthorized Access' }) } - }) - } + } else { + // If the req path does not contain /api/v1, then allow the request to pass through, example: /assets, /canvas + next() + } + }) + + // this is for SSO and must be after the JWT cookie middleware + await this.identityManager.initializeSSO(this.app) if (process.env.ENABLE_METRICS === 'true') { switch (process.env.METRICS_PROVIDER) { @@ -251,7 +304,7 @@ export class App { }) }) - if (process.env.MODE === MODE.QUEUE && process.env.ENABLE_BULLMQ_DASHBOARD === 'true') { + if (process.env.MODE === MODE.QUEUE && process.env.ENABLE_BULLMQ_DASHBOARD === 'true' && !this.identityManager.isCloud()) { this.app.use('/admin/queues', this.queueManager.getBullBoardRouter()) } diff --git a/packages/server/src/middlewares/errors/index.ts b/packages/server/src/middlewares/errors/index.ts index 88b3dd80cb7..44d56abe620 100644 --- a/packages/server/src/middlewares/errors/index.ts +++ b/packages/server/src/middlewares/errors/index.ts @@ -5,15 +5,17 @@ import { InternalFlowiseError } from '../../errors/internalFlowiseError' // we need eslint because we have to pass next arg for the error middleware // eslint-disable-next-line async function errorHandlerMiddleware(err: InternalFlowiseError, req: Request, res: Response, next: NextFunction) { + const statusCode = err.statusCode || StatusCodes.INTERNAL_SERVER_ERROR if 
(err.message.includes('401 Incorrect API key provided')) err.message = '401 Invalid model key or Incorrect local model configuration.' let displayedError = { - statusCode: err.statusCode || StatusCodes.INTERNAL_SERVER_ERROR, + statusCode, success: false, message: err.message, // Provide error stack trace only in development stack: process.env.NODE_ENV === 'development' ? err.stack : {} } + if (!req.body || !req.body.streaming || req.body.streaming === 'false') { res.setHeader('Content-Type', 'application/json') res.status(displayedError.statusCode).json(displayedError) diff --git a/packages/server/src/queue/PredictionQueue.ts b/packages/server/src/queue/PredictionQueue.ts index 97927eb11b1..af91d8fbadc 100644 --- a/packages/server/src/queue/PredictionQueue.ts +++ b/packages/server/src/queue/PredictionQueue.ts @@ -7,9 +7,11 @@ import { RedisEventPublisher } from './RedisEventPublisher' import { AbortControllerPool } from '../AbortControllerPool' import { BaseQueue } from './BaseQueue' import { RedisOptions } from 'bullmq' +import { UsageCacheManager } from '../UsageCacheManager' import logger from '../utils/logger' import { generateAgentflowv2 as generateAgentflowv2_json } from 'flowise-components' import { databaseEntities } from '../utils' +import { executeCustomNodeFunction } from '../utils/executeCustomNodeFunction' interface PredictionQueueOptions { appDataSource: DataSource @@ -17,6 +19,7 @@ interface PredictionQueueOptions { cachePool: CachePool componentNodes: IComponentNodes abortControllerPool: AbortControllerPool + usageCacheManager: UsageCacheManager } interface IGenerateAgentflowv2Params extends IExecuteFlowParams { @@ -34,6 +37,7 @@ export class PredictionQueue extends BaseQueue { private cachePool: CachePool private appDataSource: DataSource private abortControllerPool: AbortControllerPool + private usageCacheManager: UsageCacheManager private redisPublisher: RedisEventPublisher private queueName: string @@ -45,6 +49,7 @@ export class PredictionQueue extends BaseQueue { this.cachePool = options.cachePool this.appDataSource = options.appDataSource this.abortControllerPool = options.abortControllerPool + this.usageCacheManager = options.usageCacheManager this.redisPublisher = new RedisEventPublisher() this.redisPublisher.connect() } @@ -61,11 +66,12 @@ export class PredictionQueue extends BaseQueue { if (this.appDataSource) data.appDataSource = this.appDataSource if (this.telemetry) data.telemetry = this.telemetry if (this.cachePool) data.cachePool = this.cachePool + if (this.usageCacheManager) data.usageCacheManager = this.usageCacheManager if (this.componentNodes) data.componentNodes = this.componentNodes if (this.redisPublisher) data.sseStreamer = this.redisPublisher if (Object.prototype.hasOwnProperty.call(data, 'isAgentFlowGenerator')) { - logger.info('Generating Agentflow...') + logger.info(`Generating Agentflow...`) const { prompt, componentNodes, toolNodes, selectedChatModel, question } = data as IGenerateAgentflowv2Params const options: Record = { appDataSource: this.appDataSource, @@ -75,6 +81,16 @@ export class PredictionQueue extends BaseQueue { return await generateAgentflowv2_json({ prompt, componentNodes, toolNodes, selectedChatModel }, question, options) } + if (Object.prototype.hasOwnProperty.call(data, 'isExecuteCustomFunction')) { + const executeCustomFunctionData = data as any + logger.info(`[${executeCustomFunctionData.orgId}]: Executing Custom Function...`) + return await executeCustomNodeFunction({ + appDataSource: this.appDataSource, + componentNodes: 
this.componentNodes, + data: executeCustomFunctionData.data + }) + } + if (this.abortControllerPool) { const abortControllerId = `${data.chatflow.id}_${data.chatId}` const signal = new AbortController() diff --git a/packages/server/src/queue/QueueManager.ts b/packages/server/src/queue/QueueManager.ts index abd657ac6d0..4393056fba7 100644 --- a/packages/server/src/queue/QueueManager.ts +++ b/packages/server/src/queue/QueueManager.ts @@ -10,6 +10,7 @@ import { QueueEventsProducer, RedisOptions } from 'bullmq' import { createBullBoard } from 'bull-board' import { BullMQAdapter } from 'bull-board/bullMQAdapter' import { Express } from 'express' +import { UsageCacheManager } from '../UsageCacheManager' const QUEUE_NAME = process.env.QUEUE_NAME || 'flowise-queue' @@ -96,13 +97,15 @@ export class QueueManager { telemetry, cachePool, appDataSource, - abortControllerPool + abortControllerPool, + usageCacheManager }: { componentNodes: IComponentNodes telemetry: Telemetry cachePool: CachePool appDataSource: DataSource abortControllerPool: AbortControllerPool + usageCacheManager: UsageCacheManager }) { const predictionQueueName = `${QUEUE_NAME}-prediction` const predictionQueue = new PredictionQueue(predictionQueueName, this.connection, { @@ -110,7 +113,8 @@ export class QueueManager { telemetry, cachePool, appDataSource, - abortControllerPool + abortControllerPool, + usageCacheManager }) this.registerQueue('prediction', predictionQueue) this.predictionQueueEventsProducer = new QueueEventsProducer(predictionQueue.getQueueName(), { @@ -122,7 +126,8 @@ export class QueueManager { componentNodes, telemetry, cachePool, - appDataSource + appDataSource, + usageCacheManager }) this.registerQueue('upsert', upsertionQueue) diff --git a/packages/server/src/queue/RedisEventPublisher.ts b/packages/server/src/queue/RedisEventPublisher.ts index c0fce60c58d..c305757add9 100644 --- a/packages/server/src/queue/RedisEventPublisher.ts +++ b/packages/server/src/queue/RedisEventPublisher.ts @@ -13,7 +13,11 @@ export class RedisEventPublisher implements IServerSideEventStreamer { process.env.REDIS_KEEP_ALIVE && !isNaN(parseInt(process.env.REDIS_KEEP_ALIVE, 10)) ? parseInt(process.env.REDIS_KEEP_ALIVE, 10) : undefined - } + }, + pingInterval: + process.env.REDIS_KEEP_ALIVE && !isNaN(parseInt(process.env.REDIS_KEEP_ALIVE, 10)) + ? parseInt(process.env.REDIS_KEEP_ALIVE, 10) + : undefined }) } else { this.redisPublisher = createClient({ @@ -30,7 +34,11 @@ export class RedisEventPublisher implements IServerSideEventStreamer { process.env.REDIS_KEEP_ALIVE && !isNaN(parseInt(process.env.REDIS_KEEP_ALIVE, 10)) ? parseInt(process.env.REDIS_KEEP_ALIVE, 10) : undefined - } + }, + pingInterval: + process.env.REDIS_KEEP_ALIVE && !isNaN(parseInt(process.env.REDIS_KEEP_ALIVE, 10)) + ? parseInt(process.env.REDIS_KEEP_ALIVE, 10) + : undefined }) } } diff --git a/packages/server/src/queue/RedisEventSubscriber.ts b/packages/server/src/queue/RedisEventSubscriber.ts index 5b0331a72b2..49c4cb9efe4 100644 --- a/packages/server/src/queue/RedisEventSubscriber.ts +++ b/packages/server/src/queue/RedisEventSubscriber.ts @@ -15,7 +15,11 @@ export class RedisEventSubscriber { process.env.REDIS_KEEP_ALIVE && !isNaN(parseInt(process.env.REDIS_KEEP_ALIVE, 10)) ? parseInt(process.env.REDIS_KEEP_ALIVE, 10) : undefined - } + }, + pingInterval: + process.env.REDIS_KEEP_ALIVE && !isNaN(parseInt(process.env.REDIS_KEEP_ALIVE, 10)) + ? 
parseInt(process.env.REDIS_KEEP_ALIVE, 10) + : undefined }) } else { this.redisSubscriber = createClient({ @@ -32,7 +36,11 @@ export class RedisEventSubscriber { process.env.REDIS_KEEP_ALIVE && !isNaN(parseInt(process.env.REDIS_KEEP_ALIVE, 10)) ? parseInt(process.env.REDIS_KEEP_ALIVE, 10) : undefined - } + }, + pingInterval: + process.env.REDIS_KEEP_ALIVE && !isNaN(parseInt(process.env.REDIS_KEEP_ALIVE, 10)) + ? parseInt(process.env.REDIS_KEEP_ALIVE, 10) + : undefined }) } this.sseStreamer = sseStreamer diff --git a/packages/server/src/queue/UpsertQueue.ts b/packages/server/src/queue/UpsertQueue.ts index 451c413e750..de1a0e7db37 100644 --- a/packages/server/src/queue/UpsertQueue.ts +++ b/packages/server/src/queue/UpsertQueue.ts @@ -14,11 +14,13 @@ import { executeUpsert } from '../utils/upsertVector' import { executeDocStoreUpsert, insertIntoVectorStore, previewChunks, processLoader } from '../services/documentstore' import { RedisOptions } from 'bullmq' import logger from '../utils/logger' +import { UsageCacheManager } from '../UsageCacheManager' interface UpsertQueueOptions { appDataSource: DataSource telemetry: Telemetry cachePool: CachePool + usageCacheManager: UsageCacheManager componentNodes: IComponentNodes } @@ -27,6 +29,7 @@ export class UpsertQueue extends BaseQueue { private telemetry: Telemetry private cachePool: CachePool private appDataSource: DataSource + private usageCacheManager: UsageCacheManager private queueName: string constructor(name: string, connection: RedisOptions, options: UpsertQueueOptions) { @@ -36,6 +39,7 @@ export class UpsertQueue extends BaseQueue { this.telemetry = options.telemetry this.cachePool = options.cachePool this.appDataSource = options.appDataSource + this.usageCacheManager = options.usageCacheManager } public getQueueName() { @@ -52,6 +56,7 @@ export class UpsertQueue extends BaseQueue { if (this.appDataSource) data.appDataSource = this.appDataSource if (this.telemetry) data.telemetry = this.telemetry if (this.cachePool) data.cachePool = this.cachePool + if (this.usageCacheManager) data.usageCacheManager = this.usageCacheManager if (this.componentNodes) data.componentNodes = this.componentNodes // document-store/loader/preview diff --git a/packages/server/src/routes/apikey/index.ts b/packages/server/src/routes/apikey/index.ts index dbc043dd59e..ec9f1a2c9e5 100644 --- a/packages/server/src/routes/apikey/index.ts +++ b/packages/server/src/routes/apikey/index.ts @@ -1,18 +1,19 @@ import express from 'express' import apikeyController from '../../controllers/apikey' +import { checkAnyPermission, checkPermission } from '../../enterprise/rbac/PermissionCheck' const router = express.Router() // CREATE -router.post('/', apikeyController.createApiKey) -router.post('/import', apikeyController.importKeys) +router.post('/', checkPermission('apikeys:create'), apikeyController.createApiKey) +router.post('/import', checkPermission('apikeys:import'), apikeyController.importKeys) // READ -router.get('/', apikeyController.getAllApiKeys) +router.get('/', checkPermission('apikeys:view'), apikeyController.getAllApiKeys) // UPDATE -router.put(['/', '/:id'], apikeyController.updateApiKey) +router.put(['/', '/:id'], checkAnyPermission('apikeys:create,apikeys:update'), apikeyController.updateApiKey) // DELETE -router.delete(['/', '/:id'], apikeyController.deleteApiKey) +router.delete(['/', '/:id'], checkPermission('apikeys:delete'), apikeyController.deleteApiKey) export default router diff --git a/packages/server/src/routes/assistants/index.ts 
b/packages/server/src/routes/assistants/index.ts index f7754bcac8f..5599e895483 100644 --- a/packages/server/src/routes/assistants/index.ts +++ b/packages/server/src/routes/assistants/index.ts @@ -1,20 +1,21 @@ import express from 'express' import assistantsController from '../../controllers/assistants' +import { checkPermission, checkAnyPermission } from '../../enterprise/rbac/PermissionCheck' const router = express.Router() // CREATE -router.post('/', assistantsController.createAssistant) +router.post('/', checkPermission('assistants:create'), assistantsController.createAssistant) // READ -router.get('/', assistantsController.getAllAssistants) -router.get(['/', '/:id'], assistantsController.getAssistantById) +router.get('/', checkPermission('assistants:view'), assistantsController.getAllAssistants) +router.get(['/', '/:id'], checkPermission('assistants:view'), assistantsController.getAssistantById) // UPDATE -router.put(['/', '/:id'], assistantsController.updateAssistant) +router.put(['/', '/:id'], checkAnyPermission('assistants:create,assistants:update'), assistantsController.updateAssistant) // DELETE -router.delete(['/', '/:id'], assistantsController.deleteAssistant) +router.delete(['/', '/:id'], checkPermission('assistants:delete'), assistantsController.deleteAssistant) router.get('/components/chatmodels', assistantsController.getChatModels) router.get('/components/docstores', assistantsController.getDocumentStores) diff --git a/packages/server/src/routes/chatflows/index.ts b/packages/server/src/routes/chatflows/index.ts index b0c53506749..d654309da46 100644 --- a/packages/server/src/routes/chatflows/index.ts +++ b/packages/server/src/routes/chatflows/index.ts @@ -1,20 +1,24 @@ import express from 'express' import chatflowsController from '../../controllers/chatflows' +import { checkAnyPermission, checkPermission } from '../../enterprise/rbac/PermissionCheck' const router = express.Router() // CREATE -router.post('/', chatflowsController.saveChatflow) -router.post('/importchatflows', chatflowsController.importChatflows) +router.post('/', checkAnyPermission('chatflows:create,chatflows:update'), chatflowsController.saveChatflow) +router.post('/importchatflows', checkPermission('chatflows:import'), chatflowsController.importChatflows) // READ -router.get('/', chatflowsController.getAllChatflows) -router.get(['/', '/:id'], chatflowsController.getChatflowById) +router.get('/', checkAnyPermission('chatflows:view,chatflows:update'), chatflowsController.getAllChatflows) +router.get(['/', '/:id'], checkAnyPermission('chatflows:view,chatflows:update,chatflows:delete'), chatflowsController.getChatflowById) router.get(['/apikey/', '/apikey/:apikey'], chatflowsController.getChatflowByApiKey) // UPDATE -router.put(['/', '/:id'], chatflowsController.updateChatflow) +router.put(['/', '/:id'], checkAnyPermission('chatflows:create,chatflows:update'), chatflowsController.updateChatflow) // DELETE -router.delete(['/', '/:id'], chatflowsController.deleteChatflow) +router.delete(['/', '/:id'], checkPermission('chatflows:delete'), chatflowsController.deleteChatflow) + +// CHECK FOR CHANGE +router.get('/has-changed/:id/:lastUpdatedDateTime', chatflowsController.checkIfChatflowHasChanged) export default router diff --git a/packages/server/src/routes/credentials/index.ts b/packages/server/src/routes/credentials/index.ts index 9f118b49d12..6e97547db07 100644 --- a/packages/server/src/routes/credentials/index.ts +++ b/packages/server/src/routes/credentials/index.ts @@ -1,18 +1,19 @@ import express from 
'express' import credentialsController from '../../controllers/credentials' +import { checkPermission, checkAnyPermission } from '../../enterprise/rbac/PermissionCheck' const router = express.Router() // CREATE -router.post('/', credentialsController.createCredential) +router.post('/', checkPermission('credentials:create'), credentialsController.createCredential) // READ -router.get('/', credentialsController.getAllCredentials) -router.get(['/', '/:id'], credentialsController.getCredentialById) +router.get('/', checkPermission('credentials:view'), credentialsController.getAllCredentials) +router.get(['/', '/:id'], checkPermission('credentials:view'), credentialsController.getCredentialById) // UPDATE -router.put(['/', '/:id'], credentialsController.updateCredential) +router.put(['/', '/:id'], checkAnyPermission('credentials:create,credentials:update'), credentialsController.updateCredential) // DELETE -router.delete(['/', '/:id'], credentialsController.deleteCredentials) +router.delete(['/', '/:id'], checkPermission('credentials:delete'), credentialsController.deleteCredentials) export default router diff --git a/packages/server/src/routes/dataset/index.ts b/packages/server/src/routes/dataset/index.ts new file mode 100644 index 00000000000..7b73e546ac4 --- /dev/null +++ b/packages/server/src/routes/dataset/index.ts @@ -0,0 +1,29 @@ +import express from 'express' +import datasetController from '../../controllers/dataset' +import { checkAnyPermission, checkPermission } from '../../enterprise/rbac/PermissionCheck' +const router = express.Router() + +// get all datasets +router.get('/', checkPermission('datasets:view'), datasetController.getAllDatasets) +// get new dataset +router.get(['/set', '/set/:id'], checkPermission('datasets:view'), datasetController.getDataset) +// Create new dataset +router.post(['/set', '/set/:id'], checkPermission('datasets:create'), datasetController.createDataset) +// Update dataset +router.put(['/set', '/set/:id'], checkAnyPermission('datasets:create,datasets:update'), datasetController.updateDataset) +// Delete dataset via id +router.delete(['/set', '/set/:id'], checkPermission('datasets:delete'), datasetController.deleteDataset) + +// Create new row in a given dataset +router.post(['/rows', '/rows/:id'], checkPermission('datasets:create'), datasetController.addDatasetRow) +// Update row for a dataset +router.put(['/rows', '/rows/:id'], checkAnyPermission('datasets:create,datasets:update'), datasetController.updateDatasetRow) +// Delete dataset row via id +router.delete(['/rows', '/rows/:id'], checkPermission('datasets:delete'), datasetController.deleteDatasetRow) +// PATCH delete by ids +router.patch('/rows', checkPermission('datasets:delete'), datasetController.patchDeleteRows) + +// Update row for a dataset +router.post(['/reorder', '/reorder'], checkAnyPermission('datasets:create,datasets:update'), datasetController.reorderDatasetRow) + +export default router diff --git a/packages/server/src/routes/documentstore/index.ts b/packages/server/src/routes/documentstore/index.ts index 6fb911941ab..41ce33bb859 100644 --- a/packages/server/src/routes/documentstore/index.ts +++ b/packages/server/src/routes/documentstore/index.ts @@ -1,4 +1,5 @@ import express from 'express' +import { checkPermission, checkAnyPermission } from '../../enterprise/rbac/PermissionCheck' import documentStoreController from '../../controllers/documentstore' import { getMulterStorage } from '../../utils' @@ -10,56 +11,72 @@ router.post(['/refresh/', '/refresh/:id'], 
documentStoreController.refreshDocSto /** Document Store Routes */ // Create document store -router.post('/store', documentStoreController.createDocumentStore) +router.post('/store', checkPermission('documentStores:create'), documentStoreController.createDocumentStore) // List all stores -router.get('/store', documentStoreController.getAllDocumentStores) +router.get('/store', checkPermission('documentStores:view'), documentStoreController.getAllDocumentStores) // Get specific store -router.get('/store/:id', documentStoreController.getDocumentStoreById) +router.get( + '/store/:id', + checkAnyPermission('documentStores:view,documentStores:update,documentStores:delete'), + documentStoreController.getDocumentStoreById +) // Update documentStore -router.put('/store/:id', documentStoreController.updateDocumentStore) +router.put('/store/:id', checkAnyPermission('documentStores:create,documentStores:update'), documentStoreController.updateDocumentStore) // Delete documentStore -router.delete('/store/:id', documentStoreController.deleteDocumentStore) +router.delete('/store/:id', checkPermission('documentStores:delete'), documentStoreController.deleteDocumentStore) // Get document store configs -router.get('/store-configs/:id/:loaderId', documentStoreController.getDocStoreConfigs) +router.get('/store-configs/:id/:loaderId', checkAnyPermission('documentStores:view'), documentStoreController.getDocStoreConfigs) /** Component Nodes = Document Store - Loaders */ // Get all loaders -router.get('/components/loaders', documentStoreController.getDocumentLoaders) +router.get('/components/loaders', checkPermission('documentStores:add-loader'), documentStoreController.getDocumentLoaders) // delete loader from document store -router.delete('/loader/:id/:loaderId', documentStoreController.deleteLoaderFromDocumentStore) +router.delete( + '/loader/:id/:loaderId', + checkPermission('documentStores:delete-loader'), + documentStoreController.deleteLoaderFromDocumentStore +) // chunking preview -router.post('/loader/preview', documentStoreController.previewFileChunks) +router.post('/loader/preview', checkPermission('documentStores:preview-process'), documentStoreController.previewFileChunks) // saving process -router.post('/loader/save', documentStoreController.saveProcessingLoader) +router.post('/loader/save', checkPermission('documentStores:preview-process'), documentStoreController.saveProcessingLoader) // chunking process -router.post('/loader/process/:loaderId', documentStoreController.processLoader) +router.post('/loader/process/:loaderId', checkPermission('documentStores:preview-process'), documentStoreController.processLoader) /** Document Store - Loaders - Chunks */ // delete specific file chunk from the store -router.delete('/chunks/:storeId/:loaderId/:chunkId', documentStoreController.deleteDocumentStoreFileChunk) +router.delete( + '/chunks/:storeId/:loaderId/:chunkId', + checkAnyPermission('documentStores:update,documentStores:delete'), + documentStoreController.deleteDocumentStoreFileChunk +) // edit specific file chunk from the store -router.put('/chunks/:storeId/:loaderId/:chunkId', documentStoreController.editDocumentStoreFileChunk) +router.put( + '/chunks/:storeId/:loaderId/:chunkId', + checkPermission('documentStores:update'), + documentStoreController.editDocumentStoreFileChunk +) // Get all file chunks from the store -router.get('/chunks/:storeId/:fileId/:pageNo', documentStoreController.getDocumentStoreFileChunks) +router.get('/chunks/:storeId/:fileId/:pageNo', 
checkPermission('documentStores:view'), documentStoreController.getDocumentStoreFileChunks) // add chunks to the selected vector store -router.post('/vectorstore/insert', documentStoreController.insertIntoVectorStore) +router.post('/vectorstore/insert', checkPermission('documentStores:upsert-config'), documentStoreController.insertIntoVectorStore) // save the selected vector store -router.post('/vectorstore/save', documentStoreController.saveVectorStoreConfig) +router.post('/vectorstore/save', checkPermission('documentStores:upsert-config'), documentStoreController.saveVectorStoreConfig) // delete data from the selected vector store -router.delete('/vectorstore/:storeId', documentStoreController.deleteVectorStoreFromStore) +router.delete('/vectorstore/:storeId', checkPermission('documentStores:upsert-config'), documentStoreController.deleteVectorStoreFromStore) // query the vector store -router.post('/vectorstore/query', documentStoreController.queryVectorStore) +router.post('/vectorstore/query', checkPermission('documentStores:view'), documentStoreController.queryVectorStore) // Get all embedding providers -router.get('/components/embeddings', documentStoreController.getEmbeddingProviders) +router.get('/components/embeddings', checkPermission('documentStores:upsert-config'), documentStoreController.getEmbeddingProviders) // Get all vector store providers -router.get('/components/vectorstore', documentStoreController.getVectorStoreProviders) +router.get('/components/vectorstore', checkPermission('documentStores:upsert-config'), documentStoreController.getVectorStoreProviders) // Get all Record Manager providers -router.get('/components/recordmanager', documentStoreController.getRecordManagerProviders) +router.get('/components/recordmanager', checkPermission('documentStores:upsert-config'), documentStoreController.getRecordManagerProviders) // update the selected vector store from the playground -router.post('/vectorstore/update', documentStoreController.updateVectorStoreConfigOnly) +router.post('/vectorstore/update', checkPermission('documentStores:upsert-config'), documentStoreController.updateVectorStoreConfigOnly) // generate docstore tool description router.post('/generate-tool-desc/:id', documentStoreController.generateDocStoreToolDesc) diff --git a/packages/server/src/routes/evaluations/index.ts b/packages/server/src/routes/evaluations/index.ts new file mode 100644 index 00000000000..b2bbc4764d4 --- /dev/null +++ b/packages/server/src/routes/evaluations/index.ts @@ -0,0 +1,14 @@ +import express from 'express' +import evaluationsController from '../../controllers/evaluations' +import { checkPermission, checkAnyPermission } from '../../enterprise/rbac/PermissionCheck' +const router = express.Router() + +router.get('/', checkPermission('evaluations:view'), evaluationsController.getAllEvaluations) +router.get('/:id', checkPermission('evaluations:view'), evaluationsController.getEvaluation) +router.delete('/:id', checkPermission('evaluations:delete'), evaluationsController.deleteEvaluation) +router.post('/', checkPermission('evaluations:create'), evaluationsController.createEvaluation) +router.get('/is-outdated/:id', evaluationsController.isOutdated) +router.get('/run-again/:id', checkAnyPermission('evaluations:create,evaluations:run'), evaluationsController.runAgain) +router.get('/versions/:id', checkPermission('evaluations:view'), evaluationsController.getVersions) +router.patch('/', checkPermission('evaluations:delete'), evaluationsController.patchDeleteEvaluations) +export default 
router diff --git a/packages/server/src/routes/evaluator/index.ts b/packages/server/src/routes/evaluator/index.ts new file mode 100644 index 00000000000..481cbaf8e08 --- /dev/null +++ b/packages/server/src/routes/evaluator/index.ts @@ -0,0 +1,17 @@ +import express from 'express' +import evaluatorsController from '../../controllers/evaluators' +import { checkPermission, checkAnyPermission } from '../../enterprise/rbac/PermissionCheck' +const router = express.Router() + +// get all evaluators +router.get('/', checkPermission('evaluators:view'), evaluatorsController.getAllEvaluators) +// get evaluator by id +router.get(['/', '/:id'], checkPermission('evaluators:view'), evaluatorsController.getEvaluator) +// Create new evaluator +router.post(['/', '/:id'], checkPermission('evaluators:create'), evaluatorsController.createEvaluator) +// Update evaluator +router.put(['/', '/:id'], checkAnyPermission('evaluators:create,evaluators:update'), evaluatorsController.updateEvaluator) +// Delete evaluator via id +router.delete(['/', '/:id'], checkPermission('evaluators:delete'), evaluatorsController.deleteEvaluator) + +export default router diff --git a/packages/server/src/routes/executions/index.ts b/packages/server/src/routes/executions/index.ts index 589bbf13a2f..6106613a19c 100644 --- a/packages/server/src/routes/executions/index.ts +++ b/packages/server/src/routes/executions/index.ts @@ -1,16 +1,17 @@ import express from 'express' import executionController from '../../controllers/executions' +import { checkAnyPermission } from '../../enterprise/rbac/PermissionCheck' const router = express.Router() // READ -router.get('/', executionController.getAllExecutions) -router.get(['/', '/:id'], executionController.getExecutionById) +router.get('/', checkAnyPermission('executions:view'), executionController.getAllExecutions) +router.get(['/', '/:id'], checkAnyPermission('executions:view'), executionController.getExecutionById) // PUT router.put(['/', '/:id'], executionController.updateExecution) // DELETE - single execution or multiple executions -router.delete('/:id', executionController.deleteExecutions) -router.delete('/', executionController.deleteExecutions) +router.delete('/:id', checkAnyPermission('executions:delete'), executionController.deleteExecutions) +router.delete('/', checkAnyPermission('executions:delete'), executionController.deleteExecutions) export default router diff --git a/packages/server/src/routes/export-import/index.ts b/packages/server/src/routes/export-import/index.ts index 40c3930d2fd..17b28a7c346 100644 --- a/packages/server/src/routes/export-import/index.ts +++ b/packages/server/src/routes/export-import/index.ts @@ -1,9 +1,10 @@ import express from 'express' import exportImportController from '../../controllers/export-import' +import { checkPermission } from '../../enterprise/rbac/PermissionCheck' const router = express.Router() -router.post('/export', exportImportController.exportData) +router.post('/export', checkPermission('workspace:export'), exportImportController.exportData) -router.post('/import', exportImportController.importData) +router.post('/import', checkPermission('workspace:import'), exportImportController.importData) export default router diff --git a/packages/server/src/routes/files/index.ts b/packages/server/src/routes/files/index.ts new file mode 100644 index 00000000000..3a48183d48e --- /dev/null +++ b/packages/server/src/routes/files/index.ts @@ -0,0 +1,11 @@ +import express from 'express' +import filesController from '../../controllers/files' +const router =
express.Router() + +// READ +router.get('/', filesController.getAllFiles) + +// DELETE +router.delete('/', filesController.deleteFile) + +export default router diff --git a/packages/server/src/routes/index.ts b/packages/server/src/routes/index.ts index 42a5d231207..63633e7b6f9 100644 --- a/packages/server/src/routes/index.ts +++ b/packages/server/src/routes/index.ts @@ -9,10 +9,14 @@ import chatflowsUploadsRouter from './chatflows-uploads' import componentsCredentialsRouter from './components-credentials' import componentsCredentialsIconRouter from './components-credentials-icon' import credentialsRouter from './credentials' +import datasetRouter from './dataset' import documentStoreRouter from './documentstore' +import evaluationsRouter from './evaluations' +import evaluatorsRouter from './evaluator' import exportImportRouter from './export-import' import feedbackRouter from './feedback' import fetchLinksRouter from './fetch-links' +import filesRouter from './files' import flowConfigRouter from './flow-config' import getUploadFileRouter from './get-upload-file' import getUploadPathRouter from './get-upload-path' @@ -20,6 +24,7 @@ import internalChatmessagesRouter from './internal-chat-messages' import internalPredictionRouter from './internal-predictions' import leadsRouter from './leads' import loadPromptRouter from './load-prompts' +import logsRouter from './log' import marketplacesRouter from './marketplaces' import nodeConfigRouter from './node-configs' import nodeCustomFunctionRouter from './node-custom-functions' @@ -36,6 +41,7 @@ import promptListsRouter from './prompts-lists' import publicChatbotRouter from './public-chatbots' import publicChatflowsRouter from './public-chatflows' import publicExecutionsRouter from './public-executions' +import settingsRouter from './settings' import statsRouter from './stats' import toolsRouter from './tools' import upsertHistoryRouter from './upsert-history' @@ -43,11 +49,24 @@ import variablesRouter from './variables' import vectorRouter from './vectors' import verifyRouter from './verify' import versionRouter from './versions' +import pricingRouter from './pricing' import nvidiaNimRouter from './nvidia-nim' import executionsRouter from './executions' import validationRouter from './validation' import agentflowv2GeneratorRouter from './agentflowv2-generator' +import authRouter from '../enterprise/routes/auth' +import auditRouter from '../enterprise/routes/audit' +import userRouter from '../enterprise/routes/user.route' +import organizationRouter from '../enterprise/routes/organization.route' +import roleRouter from '../enterprise/routes/role.route' +import organizationUserRoute from '../enterprise/routes/organization-user.route' +import workspaceRouter from '../enterprise/routes/workspace.route' +import workspaceUserRouter from '../enterprise/routes/workspace-user.route' +import accountRouter from '../enterprise/routes/account.route' +import loginMethodRouter from '../enterprise/routes/login-method.route' +import { IdentityManager } from '../IdentityManager' + const router = express.Router() router.use('/ping', pingRouter) @@ -57,11 +76,14 @@ router.use('/attachments', attachmentsRouter) router.use('/chatflows', chatflowsRouter) router.use('/chatflows-streaming', chatflowsStreamingRouter) router.use('/chatmessage', chatMessageRouter) +router.use('/chatflows-uploads', chatflowsUploadsRouter) router.use('/components-credentials', componentsCredentialsRouter) router.use('/components-credentials-icon', componentsCredentialsIconRouter) 
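The route changes in this patch all follow the same guard pattern: individual handlers are wrapped with checkPermission / checkAnyPermission from enterprise/rbac/PermissionCheck, and whole routers are mounted behind IdentityManager.checkFeatureByPlan. The implementations of those guards are not part of this diff; the sketch below is only an illustrative, hypothetical version of such an Express middleware factory, assuming req.user.permissions has been populated by the verifyToken / API-key middleware added in index.ts above (the *Example names are placeholders, not the real exports).

import { NextFunction, Request, Response } from 'express'

// Hypothetical sketch only; the real guards live in enterprise/rbac/PermissionCheck
// and may differ in shape and behaviour.
const checkPermissionExample = (permission: string) => (req: Request, res: Response, next: NextFunction) => {
    // Assumes an upstream middleware (JWT cookie or API key) attached the user and its permissions
    const permissions: string[] = (req as any).user?.permissions ?? []
    if (permissions.includes(permission)) return next()
    return res.status(403).json({ error: 'Forbidden' })
}

const checkAnyPermissionExample = (permissionList: string) => (req: Request, res: Response, next: NextFunction) => {
    // The route files pass a comma-separated list, e.g. 'chatflows:view,chatflows:update'
    const required = permissionList.split(',')
    const permissions: string[] = (req as any).user?.permissions ?? []
    if (required.some((p) => permissions.includes(p))) return next()
    return res.status(403).json({ error: 'Forbidden' })
}

// Usage mirrors the route files in this diff:
// router.post('/', checkPermissionExample('tools:create'), toolsController.createTool)
// router.put(['/', '/:id'], checkAnyPermissionExample('tools:update,tools:create'), toolsController.updateTool)

The permission strings themselves ('apikeys:create', 'documentStores:upsert-config', and so on) are taken verbatim from the routes in this patch and are defined by the enterprise RBAC module.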
-router.use('/chatflows-uploads', chatflowsUploadsRouter) router.use('/credentials', credentialsRouter) +router.use('/datasets', IdentityManager.checkFeatureByPlan('feat:datasets'), datasetRouter) router.use('/document-store', documentStoreRouter) +router.use('/evaluations', IdentityManager.checkFeatureByPlan('feat:evaluations'), evaluationsRouter) +router.use('/evaluators', IdentityManager.checkFeatureByPlan('feat:evaluators'), evaluatorsRouter) router.use('/export-import', exportImportRouter) router.use('/feedback', feedbackRouter) router.use('/fetch-links', fetchLinksRouter) @@ -94,9 +116,24 @@ router.use('/vector', vectorRouter) router.use('/verify', verifyRouter) router.use('/version', versionRouter) router.use('/upsert-history', upsertHistoryRouter) +router.use('/settings', settingsRouter) +router.use('/pricing', pricingRouter) router.use('/nvidia-nim', nvidiaNimRouter) router.use('/executions', executionsRouter) router.use('/validation', validationRouter) router.use('/agentflowv2-generator', agentflowv2GeneratorRouter) +router.use('/auth', authRouter) +router.use('/audit', IdentityManager.checkFeatureByPlan('feat:login-activity'), auditRouter) +router.use('/user', userRouter) +router.use('/organization', organizationRouter) +router.use('/role', IdentityManager.checkFeatureByPlan('feat:roles'), roleRouter) +router.use('/organizationuser', organizationUserRoute) +router.use('/workspace', workspaceRouter) +router.use('/workspaceuser', workspaceUserRouter) +router.use('/account', accountRouter) +router.use('/loginmethod', loginMethodRouter) +router.use('/logs', IdentityManager.checkFeatureByPlan('feat:logs'), logsRouter) +router.use('/files', IdentityManager.checkFeatureByPlan('feat:files'), filesRouter) + export default router diff --git a/packages/server/src/routes/log/index.ts b/packages/server/src/routes/log/index.ts new file mode 100644 index 00000000000..290004f5a76 --- /dev/null +++ b/packages/server/src/routes/log/index.ts @@ -0,0 +1,9 @@ +import express from 'express' +import logController from '../../controllers/log' +import { checkAnyPermission } from '../../enterprise/rbac/PermissionCheck' +const router = express.Router() + +// READ +router.get('/', checkAnyPermission('logs:view'), logController.getLogs) + +export default router diff --git a/packages/server/src/routes/marketplaces/index.ts b/packages/server/src/routes/marketplaces/index.ts index d97f96f3830..050140358ff 100644 --- a/packages/server/src/routes/marketplaces/index.ts +++ b/packages/server/src/routes/marketplaces/index.ts @@ -1,16 +1,17 @@ import express from 'express' import marketplacesController from '../../controllers/marketplaces' +import { checkPermission, checkAnyPermission } from '../../enterprise/rbac/PermissionCheck' const router = express.Router() // READ -router.get('/templates', marketplacesController.getAllTemplates) +router.get('/templates', checkPermission('templates:marketplace'), marketplacesController.getAllTemplates) -router.post('/custom', marketplacesController.saveCustomTemplate) +router.post('/custom', checkAnyPermission('templates:flowexport,templates:toolexport'), marketplacesController.saveCustomTemplate) // READ -router.get('/custom', marketplacesController.getAllCustomTemplates) +router.get('/custom', checkPermission('templates:custom'), marketplacesController.getAllCustomTemplates) // DELETE -router.delete(['/', '/custom/:id'], marketplacesController.deleteCustomTemplate) +router.delete(['/', '/custom/:id'], checkPermission('templates:custom-delete'), 
marketplacesController.deleteCustomTemplate) export default router diff --git a/packages/server/src/routes/pricing/index.ts b/packages/server/src/routes/pricing/index.ts new file mode 100644 index 00000000000..ce82a6fdad1 --- /dev/null +++ b/packages/server/src/routes/pricing/index.ts @@ -0,0 +1,8 @@ +import express from 'express' +import pricingController from '../../controllers/pricing' +const router = express.Router() + +// GET +router.get('/', pricingController.getPricing) + +export default router diff --git a/packages/server/src/routes/settings/index.ts b/packages/server/src/routes/settings/index.ts new file mode 100644 index 00000000000..e311c76b4c9 --- /dev/null +++ b/packages/server/src/routes/settings/index.ts @@ -0,0 +1,8 @@ +import express from 'express' +import settingsController from '../../controllers/settings' +const router = express.Router() + +// CREATE +router.get('/', settingsController.getSettingsList) + +export default router diff --git a/packages/server/src/routes/tools/index.ts b/packages/server/src/routes/tools/index.ts index e97fb5cf46d..81ff9029242 100644 --- a/packages/server/src/routes/tools/index.ts +++ b/packages/server/src/routes/tools/index.ts @@ -1,19 +1,20 @@ import express from 'express' import toolsController from '../../controllers/tools' +import { checkAnyPermission, checkPermission } from '../../enterprise/rbac/PermissionCheck' const router = express.Router() // CREATE -router.post('/', toolsController.createTool) +router.post('/', checkPermission('tools:create'), toolsController.createTool) // READ -router.get('/', toolsController.getAllTools) -router.get(['/', '/:id'], toolsController.getToolById) +router.get('/', checkPermission('tools:view'), toolsController.getAllTools) +router.get(['/', '/:id'], checkAnyPermission('tools:view'), toolsController.getToolById) // UPDATE -router.put(['/', '/:id'], toolsController.updateTool) +router.put(['/', '/:id'], checkAnyPermission('tools:update,tools:create'), toolsController.updateTool) // DELETE -router.delete(['/', '/:id'], toolsController.deleteTool) +router.delete(['/', '/:id'], checkPermission('tools:delete'), toolsController.deleteTool) export default router diff --git a/packages/server/src/routes/variables/index.ts b/packages/server/src/routes/variables/index.ts index f6d3625a454..20ab6e1356b 100644 --- a/packages/server/src/routes/variables/index.ts +++ b/packages/server/src/routes/variables/index.ts @@ -1,18 +1,19 @@ import express from 'express' import variablesController from '../../controllers/variables' +import { checkAnyPermission, checkPermission } from '../../enterprise/rbac/PermissionCheck' const router = express.Router() // CREATE -router.post('/', variablesController.createVariable) +router.post('/', checkPermission('variables:create'), variablesController.createVariable) // READ -router.get('/', variablesController.getAllVariables) +router.get('/', checkPermission('variables:view'), variablesController.getAllVariables) // UPDATE -router.put(['/', '/:id'], variablesController.updateVariable) +router.put(['/', '/:id'], checkAnyPermission('variables:create,variables:update'), variablesController.updateVariable) // DELETE -router.delete(['/', '/:id'], variablesController.deleteVariable) +router.delete(['/', '/:id'], checkPermission('variables:delete'), variablesController.deleteVariable) export default router diff --git a/packages/server/src/services/apikey/index.ts b/packages/server/src/services/apikey/index.ts index 9ab92edf829..a63d315a58e 100644 --- 
a/packages/server/src/services/apikey/index.ts +++ b/packages/server/src/services/apikey/index.ts @@ -1,48 +1,29 @@ import { StatusCodes } from 'http-status-codes' -import { - addAPIKey as addAPIKey_json, - deleteAPIKey as deleteAPIKey_json, - generateAPIKey, - generateSecretHash, - getApiKey as getApiKey_json, - getAPIKeys as getAPIKeys_json, - updateAPIKey as updateAPIKey_json, - replaceAllAPIKeys as replaceAllAPIKeys_json, - importKeys as importKeys_json -} from '../../utils/apiKey' +import { generateAPIKey, generateSecretHash } from '../../utils/apiKey' import { addChatflowsCount } from '../../utils/addChatflowsCount' import { InternalFlowiseError } from '../../errors/internalFlowiseError' import { getErrorMessage } from '../../errors/utils' import { getRunningExpressApp } from '../../utils/getRunningExpressApp' import { ApiKey } from '../../database/entities/ApiKey' -import { appConfig } from '../../AppConfig' -import { randomBytes } from 'crypto' import { Not, IsNull } from 'typeorm' +import { getWorkspaceSearchOptions } from '../../enterprise/utils/ControllerServiceUtils' +import { v4 as uuidv4 } from 'uuid' -const _apikeysStoredInJson = (): boolean => { - return appConfig.apiKeys.storageType === 'json' +const getAllApiKeysFromDB = async (workspaceId?: string) => { + const appServer = getRunningExpressApp() + const keys = await appServer.AppDataSource.getRepository(ApiKey).findBy(getWorkspaceSearchOptions(workspaceId)) + const keysWithChatflows = await addChatflowsCount(keys) + return keysWithChatflows } -const _apikeysStoredInDb = (): boolean => { - return appConfig.apiKeys.storageType === 'db' -} - -const getAllApiKeys = async () => { +const getAllApiKeys = async (workspaceId?: string, autoCreateNewKey?: boolean) => { try { - if (_apikeysStoredInJson()) { - const keys = await getAPIKeys_json() - return await addChatflowsCount(keys) - } else if (_apikeysStoredInDb()) { - const appServer = getRunningExpressApp() - let keys = await appServer.AppDataSource.getRepository(ApiKey).find() - if (keys.length === 0) { - await createApiKey('DefaultKey') - keys = await appServer.AppDataSource.getRepository(ApiKey).find() - } - return await addChatflowsCount(keys) - } else { - throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `UNKNOWN APIKEY_STORAGE_TYPE`) + let keys = await getAllApiKeysFromDB(workspaceId) + if (keys.length === 0 && autoCreateNewKey) { + await createApiKey('DefaultKey', workspaceId) + keys = await getAllApiKeysFromDB(workspaceId) } + return keys } catch (error) { throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error: apikeyService.getAllApiKeys - ${getErrorMessage(error)}`) } @@ -50,90 +31,64 @@ const getAllApiKeys = async () => { const getApiKey = async (apiKey: string) => { try { - if (_apikeysStoredInJson()) { - return getApiKey_json(apiKey) - } else if (_apikeysStoredInDb()) { - const appServer = getRunningExpressApp() - const currentKey = await appServer.AppDataSource.getRepository(ApiKey).findOneBy({ - apiKey: apiKey - }) - if (!currentKey) { - return undefined - } - return currentKey - } else { - throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `UNKNOWN APIKEY_STORAGE_TYPE`) + const appServer = getRunningExpressApp() + const currentKey = await appServer.AppDataSource.getRepository(ApiKey).findOneBy({ + apiKey: apiKey + }) + if (!currentKey) { + return undefined } + return currentKey } catch (error) { - throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error: apikeyService.createApiKey - 
${getErrorMessage(error)}`) + throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error: apikeyService.getApiKey - ${getErrorMessage(error)}`) } } -const createApiKey = async (keyName: string) => { +const createApiKey = async (keyName: string, workspaceId?: string) => { try { - if (_apikeysStoredInJson()) { - const keys = await addAPIKey_json(keyName) - return await addChatflowsCount(keys) - } else if (_apikeysStoredInDb()) { - const apiKey = generateAPIKey() - const apiSecret = generateSecretHash(apiKey) - const appServer = getRunningExpressApp() - const newKey = new ApiKey() - newKey.id = randomBytes(16).toString('hex') - newKey.apiKey = apiKey - newKey.apiSecret = apiSecret - newKey.keyName = keyName - const key = appServer.AppDataSource.getRepository(ApiKey).create(newKey) - await appServer.AppDataSource.getRepository(ApiKey).save(key) - return getAllApiKeys() - } else { - throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `UNKNOWN APIKEY_STORAGE_TYPE`) - } + const apiKey = generateAPIKey() + const apiSecret = generateSecretHash(apiKey) + const appServer = getRunningExpressApp() + const newKey = new ApiKey() + newKey.id = uuidv4() + newKey.apiKey = apiKey + newKey.apiSecret = apiSecret + newKey.keyName = keyName + newKey.workspaceId = workspaceId + const key = appServer.AppDataSource.getRepository(ApiKey).create(newKey) + await appServer.AppDataSource.getRepository(ApiKey).save(key) + return await getAllApiKeysFromDB(workspaceId) } catch (error) { throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error: apikeyService.createApiKey - ${getErrorMessage(error)}`) } } // Update api key -const updateApiKey = async (id: string, keyName: string) => { +const updateApiKey = async (id: string, keyName: string, workspaceId?: string) => { try { - if (_apikeysStoredInJson()) { - const keys = await updateAPIKey_json(id, keyName) - return await addChatflowsCount(keys) - } else if (_apikeysStoredInDb()) { - const appServer = getRunningExpressApp() - const currentKey = await appServer.AppDataSource.getRepository(ApiKey).findOneBy({ - id: id - }) - if (!currentKey) { - throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `ApiKey ${currentKey} not found`) - } - currentKey.keyName = keyName - await appServer.AppDataSource.getRepository(ApiKey).save(currentKey) - return getAllApiKeys() - } else { - throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `UNKNOWN APIKEY_STORAGE_TYPE`) + const appServer = getRunningExpressApp() + const currentKey = await appServer.AppDataSource.getRepository(ApiKey).findOneBy({ + id: id + }) + if (!currentKey) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `ApiKey ${currentKey} not found`) } + currentKey.keyName = keyName + await appServer.AppDataSource.getRepository(ApiKey).save(currentKey) + return await getAllApiKeysFromDB(workspaceId) } catch (error) { throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error: apikeyService.updateApiKey - ${getErrorMessage(error)}`) } } -const deleteApiKey = async (id: string) => { +const deleteApiKey = async (id: string, workspaceId?: string) => { try { - if (_apikeysStoredInJson()) { - const keys = await deleteAPIKey_json(id) - return await addChatflowsCount(keys) - } else if (_apikeysStoredInDb()) { - const appServer = getRunningExpressApp() - const dbResponse = await appServer.AppDataSource.getRepository(ApiKey).delete({ id: id }) - if (!dbResponse) { - throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `ApiKey ${id} not found`) - } - return 
getAllApiKeys() - } else { - throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `UNKNOWN APIKEY_STORAGE_TYPE`) + const appServer = getRunningExpressApp() + const dbResponse = await appServer.AppDataSource.getRepository(ApiKey).delete({ id, workspaceId }) + if (!dbResponse) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `ApiKey ${id} not found`) } + return dbResponse } catch (error) { throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error: apikeyService.deleteApiKey - ${getErrorMessage(error)}`) } @@ -142,6 +97,7 @@ const deleteApiKey = async (id: string) => { const importKeys = async (body: any) => { try { const jsonFile = body.jsonFile + const workspaceId = body.workspaceId const splitDataURI = jsonFile.split(',') if (splitDataURI[0] !== 'data:application/json;base64') { throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Invalid dataURI`) @@ -149,70 +105,98 @@ const importKeys = async (body: any) => { const bf = Buffer.from(splitDataURI[1] || '', 'base64') const plain = bf.toString('utf8') const keys = JSON.parse(plain) - if (_apikeysStoredInJson()) { - if (body.importMode === 'replaceAll') { - await replaceAllAPIKeys_json(keys) - } else { - await importKeys_json(keys, body.importMode) - } - return await addChatflowsCount(keys) - } else if (_apikeysStoredInDb()) { - const appServer = getRunningExpressApp() - const allApiKeys = await appServer.AppDataSource.getRepository(ApiKey).find() - if (body.importMode === 'replaceAll') { - await appServer.AppDataSource.getRepository(ApiKey).delete({ - id: Not(IsNull()) - }) + + // Validate schema of imported keys + if (!Array.isArray(keys)) { + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, `Invalid format: Expected an array of API keys`) + } + + const requiredFields = ['keyName', 'apiKey', 'apiSecret', 'createdAt', 'id'] + for (let i = 0; i < keys.length; i++) { + const key = keys[i] + if (typeof key !== 'object' || key === null) { + throw new InternalFlowiseError(StatusCodes.BAD_REQUEST, `Invalid format: Key at index ${i} is not an object`) } - if (body.importMode === 'errorIfExist') { - // if importMode is errorIfExist, check for existing keys and raise error before any modification to the DB - for (const key of keys) { - const keyNameExists = allApiKeys.find((k) => k.keyName === key.keyName) - if (keyNameExists) { - throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Key with name ${key.keyName} already exists`) - } + + for (const field of requiredFields) { + if (!(field in key)) { + throw new InternalFlowiseError( + StatusCodes.BAD_REQUEST, + `Invalid format: Key at index ${i} is missing required field '${field}'` + ) + } + if (typeof key[field] !== 'string') { + throw new InternalFlowiseError( + StatusCodes.BAD_REQUEST, + `Invalid format: Key at index ${i} field '${field}' must be a string` + ) + } + if (key[field].trim() === '') { + throw new InternalFlowiseError( + StatusCodes.BAD_REQUEST, + `Invalid format: Key at index ${i} field '${field}' cannot be empty` + ) } } - // iterate through the keys and add them to the database + } + + const appServer = getRunningExpressApp() + const allApiKeys = await appServer.AppDataSource.getRepository(ApiKey).findBy(getWorkspaceSearchOptions(workspaceId)) + if (body.importMode === 'replaceAll') { + await appServer.AppDataSource.getRepository(ApiKey).delete({ + id: Not(IsNull()), + workspaceId: workspaceId + }) + } + if (body.importMode === 'errorIfExist') { + // if importMode is errorIfExist, check for existing keys and 
raise error before any modification to the DB for (const key of keys) { const keyNameExists = allApiKeys.find((k) => k.keyName === key.keyName) if (keyNameExists) { - const keyIndex = allApiKeys.findIndex((k) => k.keyName === key.keyName) - switch (body.importMode) { - case 'overwriteIfExist': { - const currentKey = allApiKeys[keyIndex] - currentKey.id = key.id - currentKey.apiKey = key.apiKey - currentKey.apiSecret = key.apiSecret - await appServer.AppDataSource.getRepository(ApiKey).save(currentKey) - break - } - case 'ignoreIfExist': { - // ignore this key and continue - continue - } - case 'errorIfExist': { - // should not reach here as we have already checked for existing keys - throw new Error(`Key with name ${key.keyName} already exists`) - } - default: { - throw new Error(`Unknown overwrite option ${body.importMode}`) - } + throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Key with name ${key.keyName} already exists`) + } + } + } + // iterate through the keys and add them to the database + for (const key of keys) { + const keyNameExists = allApiKeys.find((k) => k.keyName === key.keyName) + if (keyNameExists) { + const keyIndex = allApiKeys.findIndex((k) => k.keyName === key.keyName) + switch (body.importMode) { + case 'overwriteIfExist': + case 'replaceAll': { + const currentKey = allApiKeys[keyIndex] + currentKey.id = uuidv4() + currentKey.apiKey = key.apiKey + currentKey.apiSecret = key.apiSecret + currentKey.workspaceId = workspaceId + await appServer.AppDataSource.getRepository(ApiKey).save(currentKey) + break + } + case 'ignoreIfExist': { + // ignore this key and continue + continue + } + case 'errorIfExist': { + // should not reach here as we have already checked for existing keys + throw new Error(`Key with name ${key.keyName} already exists`) + } + default: { + throw new Error(`Unknown overwrite option ${body.importMode}`) } - } else { - const newKey = new ApiKey() - newKey.id = key.id - newKey.apiKey = key.apiKey - newKey.apiSecret = key.apiSecret - newKey.keyName = key.keyName - const newKeyEntity = appServer.AppDataSource.getRepository(ApiKey).create(newKey) - await appServer.AppDataSource.getRepository(ApiKey).save(newKeyEntity) } + } else { + const newKey = new ApiKey() + newKey.id = uuidv4() + newKey.apiKey = key.apiKey + newKey.apiSecret = key.apiSecret + newKey.keyName = key.keyName + newKey.workspaceId = workspaceId + const newKeyEntity = appServer.AppDataSource.getRepository(ApiKey).create(newKey) + await appServer.AppDataSource.getRepository(ApiKey).save(newKeyEntity) } - return getAllApiKeys() - } else { - throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `UNKNOWN APIKEY_STORAGE_TYPE`) } + return await getAllApiKeysFromDB(workspaceId) } catch (error) { throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error: apikeyService.importKeys - ${getErrorMessage(error)}`) } @@ -220,24 +204,14 @@ const importKeys = async (body: any) => { const verifyApiKey = async (paramApiKey: string): Promise => { try { - if (_apikeysStoredInJson()) { - const apiKey = await getApiKey_json(paramApiKey) - if (!apiKey) { - throw new InternalFlowiseError(StatusCodes.UNAUTHORIZED, `Unauthorized`) - } - return 'OK' - } else if (_apikeysStoredInDb()) { - const appServer = getRunningExpressApp() - const apiKey = await appServer.AppDataSource.getRepository(ApiKey).findOneBy({ - apiKey: paramApiKey - }) - if (!apiKey) { - throw new InternalFlowiseError(StatusCodes.UNAUTHORIZED, `Unauthorized`) - } - return 'OK' - } else { - throw new 
InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `UNKNOWN APIKEY_STORAGE_TYPE`) + const appServer = getRunningExpressApp() + const apiKey = await appServer.AppDataSource.getRepository(ApiKey).findOneBy({ + apiKey: paramApiKey + }) + if (!apiKey) { + throw new InternalFlowiseError(StatusCodes.UNAUTHORIZED, `Unauthorized`) } + return 'OK' } catch (error) { if (error instanceof InternalFlowiseError && error.statusCode === StatusCodes.UNAUTHORIZED) { throw error diff --git a/packages/server/src/services/assistants/index.ts b/packages/server/src/services/assistants/index.ts index 1ac9fff56f2..72dfc4a008a 100644 --- a/packages/server/src/services/assistants/index.ts +++ b/packages/server/src/services/assistants/index.ts @@ -1,24 +1,26 @@ -import OpenAI from 'openai' +import { ICommonObject } from 'flowise-components' import { StatusCodes } from 'http-status-codes' -import { uniqWith, isEqual, cloneDeep } from 'lodash' -import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { cloneDeep, isEqual, uniqWith } from 'lodash' +import OpenAI from 'openai' +import { DeleteResult, In, QueryRunner } from 'typeorm' import { Assistant } from '../../database/entities/Assistant' import { Credential } from '../../database/entities/Credential' -import { databaseEntities, decryptCredentialData, getAppVersion } from '../../utils' +import { DocumentStore } from '../../database/entities/DocumentStore' +import { Workspace } from '../../enterprise/database/entities/workspace.entity' +import { getWorkspaceSearchOptions } from '../../enterprise/utils/ControllerServiceUtils' import { InternalFlowiseError } from '../../errors/internalFlowiseError' import { getErrorMessage } from '../../errors/utils' -import { DeleteResult, QueryRunner } from 'typeorm' -import { FLOWISE_METRIC_COUNTERS, FLOWISE_COUNTER_STATUS } from '../../Interface.Metrics' import { AssistantType } from '../../Interface' -import nodesService from '../nodes' -import { DocumentStore } from '../../database/entities/DocumentStore' -import { ICommonObject } from 'flowise-components' +import { FLOWISE_COUNTER_STATUS, FLOWISE_METRIC_COUNTERS } from '../../Interface.Metrics' +import { databaseEntities, decryptCredentialData, getAppVersion } from '../../utils' +import { INPUT_PARAMS_TYPE } from '../../utils/constants' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' import logger from '../../utils/logger' import { ASSISTANT_PROMPT_GENERATOR } from '../../utils/prompt' -import { INPUT_PARAMS_TYPE } from '../../utils/constants' -import { validate } from 'uuid' +import { checkUsageLimit } from '../../utils/quotaUsage' +import nodesService from '../nodes' -const createAssistant = async (requestBody: any): Promise => { +const createAssistant = async (requestBody: any, orgId: string): Promise => { try { const appServer = getRunningExpressApp() if (!requestBody.details) { @@ -33,10 +35,14 @@ const createAssistant = async (requestBody: any): Promise => { const assistant = appServer.AppDataSource.getRepository(Assistant).create(newAssistant) const dbResponse = await appServer.AppDataSource.getRepository(Assistant).save(assistant) - await appServer.telemetry.sendTelemetry('assistant_created', { - version: await getAppVersion(), - assistantId: dbResponse.id - }) + await appServer.telemetry.sendTelemetry( + 'assistant_created', + { + version: await getAppVersion(), + assistantId: dbResponse.id + }, + orgId + ) appServer.metricsProvider?.incrementCounter(FLOWISE_METRIC_COUNTERS.ASSISTANT_CREATED, { status: 
FLOWISE_COUNTER_STATUS.SUCCESS }) @@ -134,11 +140,17 @@ const createAssistant = async (requestBody: any): Promise => { const assistant = appServer.AppDataSource.getRepository(Assistant).create(newAssistant) const dbResponse = await appServer.AppDataSource.getRepository(Assistant).save(assistant) - await appServer.telemetry.sendTelemetry('assistant_created', { - version: await getAppVersion(), - assistantId: dbResponse.id - }) + await appServer.telemetry.sendTelemetry( + 'assistant_created', + { + version: await getAppVersion(), + assistantId: dbResponse.id + }, + orgId + ) + appServer.metricsProvider?.incrementCounter(FLOWISE_METRIC_COUNTERS.ASSISTANT_CREATED, { status: FLOWISE_COUNTER_STATUS.SUCCESS }) + return dbResponse } catch (error) { throw new InternalFlowiseError( @@ -193,16 +205,37 @@ const deleteAssistant = async (assistantId: string, isDeleteBoth: any): Promise< } } -const getAllAssistants = async (type?: AssistantType): Promise => { +async function getAssistantsCountByOrganization(type: AssistantType, organizationId: string): Promise { + try { + const appServer = getRunningExpressApp() + + const workspaces = await appServer.AppDataSource.getRepository(Workspace).findBy({ organizationId }) + const workspaceIds = workspaces.map((workspace) => workspace.id) + const assistantsCount = await appServer.AppDataSource.getRepository(Assistant).countBy({ + type, + workspaceId: In(workspaceIds) + }) + + return assistantsCount + } catch (error) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: assistantsService.getAssistantsCountByOrganization - ${getErrorMessage(error)}` + ) + } +} + +const getAllAssistants = async (type?: AssistantType, workspaceId?: string): Promise => { try { const appServer = getRunningExpressApp() if (type) { const dbResponse = await appServer.AppDataSource.getRepository(Assistant).findBy({ - type + type, + ...getWorkspaceSearchOptions(workspaceId) }) return dbResponse } - const dbResponse = await appServer.AppDataSource.getRepository(Assistant).find() + const dbResponse = await appServer.AppDataSource.getRepository(Assistant).findBy(getWorkspaceSearchOptions(workspaceId)) return dbResponse } catch (error) { throw new InternalFlowiseError( @@ -212,6 +245,26 @@ const getAllAssistants = async (type?: AssistantType): Promise => { } } +const getAllAssistantsCount = async (type?: AssistantType, workspaceId?: string): Promise => { + try { + const appServer = getRunningExpressApp() + if (type) { + const dbResponse = await appServer.AppDataSource.getRepository(Assistant).countBy({ + type, + ...getWorkspaceSearchOptions(workspaceId) + }) + return dbResponse + } + const dbResponse = await appServer.AppDataSource.getRepository(Assistant).countBy(getWorkspaceSearchOptions(workspaceId)) + return dbResponse + } catch (error) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: assistantsService.getAllAssistantsCount - ${getErrorMessage(error)}` + ) + } +} + const getAssistantById = async (assistantId: string): Promise => { try { const appServer = getRunningExpressApp() @@ -338,20 +391,22 @@ const updateAssistant = async (assistantId: string, requestBody: any): Promise[], queryRunner?: QueryRunner): Promise => { +const importAssistants = async ( + newAssistants: Partial[], + orgId: string, + _: string, + subscriptionId: string, + queryRunner?: QueryRunner +): Promise => { try { - for (const data of newAssistants) { - if (data.id && !validate(data.id)) { - throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, 
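// Illustrative usage of the workspace-scoped variants introduced above, inside an async handler.
// The assistantsService import name, the 'CUSTOM' type literal and activeWorkspaceId are
// assumptions for this sketch; only the function signatures come from the code above.
const assistants = await assistantsService.getAllAssistants('CUSTOM', activeWorkspaceId)
const assistantCount = await assistantsService.getAllAssistantsCount('CUSTOM', activeWorkspaceId)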
`Error: importAssistants - invalid id!`) - } - } - const appServer = getRunningExpressApp() const repository = queryRunner ? queryRunner.manager.getRepository(Assistant) : appServer.AppDataSource.getRepository(Assistant) // step 1 - check whether array is zero if (newAssistants.length == 0) return + await checkUsageLimit('flows', subscriptionId, appServer.usageCacheManager, newAssistants.length) + // step 2 - check whether ids are duplicate in database let ids = '(' let count: number = 0 @@ -406,10 +461,10 @@ const getChatModels = async (): Promise => { } } -const getDocumentStores = async (): Promise => { +const getDocumentStores = async (activeWorkspaceId?: string): Promise => { try { const appServer = getRunningExpressApp() - const stores = await appServer.AppDataSource.getRepository(DocumentStore).find() + const stores = await appServer.AppDataSource.getRepository(DocumentStore).findBy(getWorkspaceSearchOptions(activeWorkspaceId)) const returnData = [] for (const store of stores) { if (store.status === 'UPSERTED') { @@ -492,11 +547,13 @@ export default { createAssistant, deleteAssistant, getAllAssistants, + getAllAssistantsCount, getAssistantById, updateAssistant, importAssistants, getChatModels, getDocumentStores, getTools, - generateAssistantInstruction + generateAssistantInstruction, + getAssistantsCountByOrganization } diff --git a/packages/server/src/services/chat-messages/index.ts b/packages/server/src/services/chat-messages/index.ts index 1ee804e0b18..b2b2c00a372 100644 --- a/packages/server/src/services/chat-messages/index.ts +++ b/packages/server/src/services/chat-messages/index.ts @@ -1,15 +1,17 @@ import { removeFilesFromStorage } from 'flowise-components' import { StatusCodes } from 'http-status-codes' -import { DeleteResult, FindOptionsWhere } from 'typeorm' +import { DeleteResult, FindOptionsWhere, In } from 'typeorm' import { ChatMessage } from '../../database/entities/ChatMessage' import { ChatMessageFeedback } from '../../database/entities/ChatMessageFeedback' import { InternalFlowiseError } from '../../errors/internalFlowiseError' import { getErrorMessage } from '../../errors/utils' import { ChatMessageRatingType, ChatType, IChatMessage, MODE } from '../../Interface' +import { UsageCacheManager } from '../../UsageCacheManager' import { utilAddChatMessage } from '../../utils/addChatMesage' import { utilGetChatMessage } from '../../utils/getChatMessage' import { getRunningExpressApp } from '../../utils/getRunningExpressApp' import logger from '../../utils/logger' +import { updateStorageUsage } from '../../utils/quotaUsage' // Add chatmessages for chatflowid const createChatMessage = async (chatMessage: Partial) => { @@ -36,7 +38,8 @@ const getAllChatMessages = async ( endDate?: string, messageId?: string, feedback?: boolean, - feedbackTypes?: ChatMessageRatingType[] + feedbackTypes?: ChatMessageRatingType[], + activeWorkspaceId?: string ): Promise => { try { const dbResponse = await utilGetChatMessage({ @@ -50,7 +53,8 @@ const getAllChatMessages = async ( endDate, messageId, feedback, - feedbackTypes + feedbackTypes, + activeWorkspaceId }) return dbResponse } catch (error) { @@ -73,7 +77,8 @@ const getAllInternalChatMessages = async ( endDate?: string, messageId?: string, feedback?: boolean, - feedbackTypes?: ChatMessageRatingType[] + feedbackTypes?: ChatMessageRatingType[], + activeWorkspaceId?: string ): Promise => { try { const dbResponse = await utilGetChatMessage({ @@ -87,7 +92,8 @@ const getAllInternalChatMessages = async ( endDate, messageId, feedback, - 
feedbackTypes + feedbackTypes, + activeWorkspaceId }) return dbResponse } catch (error) { @@ -101,7 +107,10 @@ const getAllInternalChatMessages = async ( const removeAllChatMessages = async ( chatId: string, chatflowid: string, - deleteOptions: FindOptionsWhere + deleteOptions: FindOptionsWhere, + orgId: string, + workspaceId: string, + usageCacheManager: UsageCacheManager ): Promise => { try { const appServer = getRunningExpressApp() @@ -113,12 +122,12 @@ const removeAllChatMessages = async ( // Delete all uploads corresponding to this chatflow/chatId if (chatId) { try { - await removeFilesFromStorage(chatflowid, chatId) + const { totalSize } = await removeFilesFromStorage(orgId, chatflowid, chatId) + await updateStorageUsage(orgId, workspaceId, totalSize, usageCacheManager) } catch (e) { - logger.error(`[server]: Error deleting file storage for chatflow ${chatflowid}, chatId ${chatId}: ${e}`) + logger.error(`[server]: Error deleting file storage for chatflow ${chatflowid}, chatId ${chatId}`) } } - const dbResponse = await appServer.AppDataSource.getRepository(ChatMessage).delete(deleteOptions) return dbResponse } catch (error) { @@ -132,7 +141,10 @@ const removeAllChatMessages = async ( const removeChatMessagesByMessageIds = async ( chatflowid: string, chatIdMap: Map, - messageIds: string[] + messageIds: string[], + orgId: string, + workspaceId: string, + usageCacheManager: UsageCacheManager ): Promise => { try { const appServer = getRunningExpressApp() @@ -149,7 +161,8 @@ const removeChatMessagesByMessageIds = async ( await appServer.AppDataSource.getRepository(ChatMessageFeedback).delete(feedbackDeleteOptions) // Delete all uploads corresponding to this chatflow/chatId - await removeFilesFromStorage(chatflowid, chatId) + const { totalSize } = await removeFilesFromStorage(orgId, chatflowid, chatId) + await updateStorageUsage(orgId, workspaceId, totalSize, usageCacheManager) } // Delete executions if they exist @@ -188,14 +201,14 @@ const abortChatMessage = async (chatId: string, chatflowid: string) => { } } -async function getAllMessages(): Promise { +async function getMessagesByChatflowIds(chatflowIds: string[]): Promise { const appServer = getRunningExpressApp() - return await appServer.AppDataSource.getRepository(ChatMessage).find() + return await appServer.AppDataSource.getRepository(ChatMessage).find({ where: { chatflowid: In(chatflowIds) } }) } -async function getAllMessagesFeedback(): Promise { +async function getMessagesFeedbackByChatflowIds(chatflowIds: string[]): Promise { const appServer = getRunningExpressApp() - return await appServer.AppDataSource.getRepository(ChatMessageFeedback).find() + return await appServer.AppDataSource.getRepository(ChatMessageFeedback).find({ where: { chatflowid: In(chatflowIds) } }) } export default { @@ -205,6 +218,6 @@ export default { removeAllChatMessages, removeChatMessagesByMessageIds, abortChatMessage, - getAllMessages, - getAllMessagesFeedback + getMessagesByChatflowIds, + getMessagesFeedbackByChatflowIds } diff --git a/packages/server/src/services/chatflows/index.ts b/packages/server/src/services/chatflows/index.ts index 1367f42fc91..9900b7c1b7b 100644 --- a/packages/server/src/services/chatflows/index.ts +++ b/packages/server/src/services/chatflows/index.ts @@ -1,12 +1,15 @@ import { ICommonObject, removeFolderFromStorage } from 'flowise-components' import { StatusCodes } from 'http-status-codes' -import { QueryRunner } from 'typeorm' +import { In, QueryRunner } from 'typeorm' import { ChatflowType, IReactFlowObject } from 
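// Illustrative usage of the new chatflow-scoped accessors above (the service import names and
// activeWorkspaceId are assumptions for this sketch): a caller now gathers the chatflow ids it
// may access and fetches messages and feedback only for those, instead of reading whole tables.
const chatflows = await chatflowsService.getAllChatflows(undefined, activeWorkspaceId)
const chatflowIds = chatflows.map((chatflow) => chatflow.id)
const messages = await chatMessagesService.getMessagesByChatflowIds(chatflowIds)
const feedback = await chatMessagesService.getMessagesFeedbackByChatflowIds(chatflowIds)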
'../../Interface' import { FLOWISE_COUNTER_STATUS, FLOWISE_METRIC_COUNTERS } from '../../Interface.Metrics' +import { UsageCacheManager } from '../../UsageCacheManager' import { ChatFlow } from '../../database/entities/ChatFlow' import { ChatMessage } from '../../database/entities/ChatMessage' import { ChatMessageFeedback } from '../../database/entities/ChatMessageFeedback' import { UpsertHistory } from '../../database/entities/UpsertHistory' +import { Workspace } from '../../enterprise/database/entities/workspace.entity' +import { getWorkspaceSearchOptions } from '../../enterprise/utils/ControllerServiceUtils' import { InternalFlowiseError } from '../../errors/internalFlowiseError' import { getErrorMessage } from '../../errors/utils' import documentStoreService from '../../services/documentstore' @@ -15,7 +18,7 @@ import { containsBase64File, updateFlowDataWithFilePaths } from '../../utils/fil import { getRunningExpressApp } from '../../utils/getRunningExpressApp' import { utilGetUploadsConfig } from '../../utils/getUploadsConfig' import logger from '../../utils/logger' -import { validate } from 'uuid' +import { checkUsageLimit, updateStorageUsage } from '../../utils/quotaUsage' // Check if chatflow valid for streaming const checkIfChatflowIsValidForStreaming = async (chatflowId: string): Promise => { @@ -90,25 +93,30 @@ const checkIfChatflowIsValidForUploads = async (chatflowId: string): Promise => { +const deleteChatflow = async (chatflowId: string, orgId: string, workspaceId: string): Promise => { try { const appServer = getRunningExpressApp() + const dbResponse = await appServer.AppDataSource.getRepository(ChatFlow).delete({ id: chatflowId }) - try { - // Delete all uploads corresponding to this chatflow - await removeFolderFromStorage(chatflowId) - await documentStoreService.updateDocumentStoreUsage(chatflowId, undefined) - // Delete all chat messages - await appServer.AppDataSource.getRepository(ChatMessage).delete({ chatflowid: chatflowId }) + // Update document store usage + await documentStoreService.updateDocumentStoreUsage(chatflowId, undefined, workspaceId) + + // Delete all chat messages + await appServer.AppDataSource.getRepository(ChatMessage).delete({ chatflowid: chatflowId }) - // Delete all chat feedback - await appServer.AppDataSource.getRepository(ChatMessageFeedback).delete({ chatflowid: chatflowId }) + // Delete all chat feedback + await appServer.AppDataSource.getRepository(ChatMessageFeedback).delete({ chatflowid: chatflowId }) - // Delete all upsert history - await appServer.AppDataSource.getRepository(UpsertHistory).delete({ chatflowid: chatflowId }) + // Delete all upsert history + await appServer.AppDataSource.getRepository(UpsertHistory).delete({ chatflowid: chatflowId }) + + try { + // Delete all uploads corresponding to this chatflow + const { totalSize } = await removeFolderFromStorage(orgId, chatflowId) + await updateStorageUsage(orgId, workspaceId, totalSize, appServer.usageCacheManager) } catch (e) { - logger.error(`[server]: Error deleting file storage for chatflow ${chatflowId}: ${e}`) + logger.error(`[server]: Error deleting file storage for chatflow ${chatflowId}`) } return dbResponse } catch (error) { @@ -119,10 +127,10 @@ const deleteChatflow = async (chatflowId: string): Promise => { } } -const getAllChatflows = async (type?: ChatflowType): Promise => { +const getAllChatflows = async (type?: ChatflowType, workspaceId?: string): Promise => { try { const appServer = getRunningExpressApp() - const dbResponse = await 
appServer.AppDataSource.getRepository(ChatFlow).find() + const dbResponse = await appServer.AppDataSource.getRepository(ChatFlow).findBy(getWorkspaceSearchOptions(workspaceId)) if (type === 'MULTIAGENT') { return dbResponse.filter((chatflow) => chatflow.type === 'MULTIAGENT') } else if (type === 'AGENTFLOW') { @@ -142,6 +150,46 @@ const getAllChatflows = async (type?: ChatflowType): Promise => { } } +async function getAllChatflowsCountByOrganization(type: ChatflowType, organizationId: string): Promise { + try { + const appServer = getRunningExpressApp() + + const workspaces = await appServer.AppDataSource.getRepository(Workspace).findBy({ organizationId }) + const workspaceIds = workspaces.map((workspace) => workspace.id) + const chatflowsCount = await appServer.AppDataSource.getRepository(ChatFlow).countBy({ + type, + workspaceId: In(workspaceIds) + }) + + return chatflowsCount + } catch (error) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: chatflowsService.getAllChatflowsCountByOrganization - ${getErrorMessage(error)}` + ) + } +} + +const getAllChatflowsCount = async (type?: ChatflowType, workspaceId?: string): Promise => { + try { + const appServer = getRunningExpressApp() + if (type) { + const dbResponse = await appServer.AppDataSource.getRepository(ChatFlow).countBy({ + type, + ...getWorkspaceSearchOptions(workspaceId) + }) + return dbResponse + } + const dbResponse = await appServer.AppDataSource.getRepository(ChatFlow).countBy(getWorkspaceSearchOptions(workspaceId)) + return dbResponse + } catch (error) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: chatflowsService.getAllChatflowsCount - ${getErrorMessage(error)}` + ) + } +} + const getChatflowByApiKey = async (apiKeyId: string, keyonly?: unknown): Promise => { try { // Here we only get chatflows that are bounded by the apikeyid and chatflows that are not bounded by any apikey @@ -184,9 +232,16 @@ const getChatflowById = async (chatflowId: string): Promise => { } } -const saveChatflow = async (newChatFlow: ChatFlow): Promise => { +const saveChatflow = async ( + newChatFlow: ChatFlow, + orgId: string, + workspaceId: string, + subscriptionId: string, + usageCacheManager: UsageCacheManager +): Promise => { try { const appServer = getRunningExpressApp() + let dbResponse: ChatFlow if (containsBase64File(newChatFlow)) { // we need a 2-step process, as we need to save the chatflow first and then update the file paths @@ -199,18 +254,30 @@ const saveChatflow = async (newChatFlow: ChatFlow): Promise => { const step1Results = await appServer.AppDataSource.getRepository(ChatFlow).save(chatflow) // step 2 - convert base64 to file paths and update the chatflow - step1Results.flowData = await updateFlowDataWithFilePaths(step1Results.id, incomingFlowData) - await _checkAndUpdateDocumentStoreUsage(step1Results) + step1Results.flowData = await updateFlowDataWithFilePaths( + step1Results.id, + incomingFlowData, + orgId, + workspaceId, + subscriptionId, + usageCacheManager + ) + await _checkAndUpdateDocumentStoreUsage(step1Results, newChatFlow.workspaceId) dbResponse = await appServer.AppDataSource.getRepository(ChatFlow).save(step1Results) } else { const chatflow = appServer.AppDataSource.getRepository(ChatFlow).create(newChatFlow) dbResponse = await appServer.AppDataSource.getRepository(ChatFlow).save(chatflow) } - await appServer.telemetry.sendTelemetry('chatflow_created', { - version: await getAppVersion(), - chatflowId: dbResponse.id, - flowGraph: 
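// getWorkspaceSearchOptions (imported from enterprise/utils/ControllerServiceUtils) is not shown
// in this diff. Assumed behaviour only, inferred from how it is spread into findBy/countBy calls
// throughout these hunks; the real implementation may differ:
const getWorkspaceSearchOptions = (workspaceId?: string) => (workspaceId ? { workspaceId } : {})
// e.g. findBy({ type, ...getWorkspaceSearchOptions(workspaceId) }) filters by workspace when one is set,
// and falls back to an unscoped query when no workspaceId is supplied.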
getTelemetryFlowObj(JSON.parse(dbResponse.flowData)?.nodes, JSON.parse(dbResponse.flowData)?.edges) - }) + await appServer.telemetry.sendTelemetry( + 'chatflow_created', + { + version: await getAppVersion(), + chatflowId: dbResponse.id, + flowGraph: getTelemetryFlowObj(JSON.parse(dbResponse.flowData)?.nodes, JSON.parse(dbResponse.flowData)?.edges) + }, + orgId + ) + appServer.metricsProvider?.incrementCounter( dbResponse?.type === 'MULTIAGENT' ? FLOWISE_METRIC_COUNTERS.AGENTFLOW_CREATED : FLOWISE_METRIC_COUNTERS.CHATFLOW_CREATED, { status: FLOWISE_COUNTER_STATUS.SUCCESS } @@ -225,20 +292,22 @@ const saveChatflow = async (newChatFlow: ChatFlow): Promise => { } } -const importChatflows = async (newChatflows: Partial[], queryRunner?: QueryRunner): Promise => { +const importChatflows = async ( + newChatflows: Partial[], + orgId: string, + _: string, + subscriptionId: string, + queryRunner?: QueryRunner +): Promise => { try { - for (const data of newChatflows) { - if (data.id && !validate(data.id)) { - throw new InternalFlowiseError(StatusCodes.PRECONDITION_FAILED, `Error: importChatflows - invalid id!`) - } - } - const appServer = getRunningExpressApp() const repository = queryRunner ? queryRunner.manager.getRepository(ChatFlow) : appServer.AppDataSource.getRepository(ChatFlow) // step 1 - check whether file chatflows array is zero if (newChatflows.length == 0) return + await checkUsageLimit('flows', subscriptionId, appServer.usageCacheManager, newChatflows.length) + // step 2 - check whether ids are duplicate in database let ids = '(' let count: number = 0 @@ -281,14 +350,27 @@ const importChatflows = async (newChatflows: Partial[], queryRunner?: } } -const updateChatflow = async (chatflow: ChatFlow, updateChatFlow: ChatFlow): Promise => { +const updateChatflow = async ( + chatflow: ChatFlow, + updateChatFlow: ChatFlow, + orgId: string, + workspaceId: string, + subscriptionId: string +): Promise => { try { const appServer = getRunningExpressApp() if (updateChatFlow.flowData && containsBase64File(updateChatFlow)) { - updateChatFlow.flowData = await updateFlowDataWithFilePaths(chatflow.id, updateChatFlow.flowData) + updateChatFlow.flowData = await updateFlowDataWithFilePaths( + chatflow.id, + updateChatFlow.flowData, + orgId, + workspaceId, + subscriptionId, + appServer.usageCacheManager + ) } const newDbChatflow = appServer.AppDataSource.getRepository(ChatFlow).merge(chatflow, updateChatFlow) - await _checkAndUpdateDocumentStoreUsage(newDbChatflow) + await _checkAndUpdateDocumentStoreUsage(newDbChatflow, chatflow.workspaceId) const dbResponse = await appServer.AppDataSource.getRepository(ChatFlow).save(newDbChatflow) return dbResponse @@ -356,15 +438,36 @@ const getSinglePublicChatbotConfig = async (chatflowId: string): Promise => } } -const _checkAndUpdateDocumentStoreUsage = async (chatflow: ChatFlow) => { +const _checkAndUpdateDocumentStoreUsage = async (chatflow: ChatFlow, workspaceId?: string) => { const parsedFlowData: IReactFlowObject = JSON.parse(chatflow.flowData) const nodes = parsedFlowData.nodes // from the nodes array find if there is a node with name == documentStore) const node = nodes.length > 0 && nodes.find((node) => node.data.name === 'documentStore') if (!node || !node.data || !node.data.inputs || node.data.inputs['selectedStore'] === undefined) { - await documentStoreService.updateDocumentStoreUsage(chatflow.id, undefined) + await documentStoreService.updateDocumentStoreUsage(chatflow.id, undefined, workspaceId) } else { - await 
documentStoreService.updateDocumentStoreUsage(chatflow.id, node.data.inputs['selectedStore']) + await documentStoreService.updateDocumentStoreUsage(chatflow.id, node.data.inputs['selectedStore'], workspaceId) + } +} + +const checkIfChatflowHasChanged = async (chatflowId: string, lastUpdatedDateTime: string): Promise => { + try { + const appServer = getRunningExpressApp() + //** + const chatflow = await appServer.AppDataSource.getRepository(ChatFlow).findOneBy({ + id: chatflowId + }) + if (!chatflow) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Chatflow ${chatflowId} not found`) + } + // parse the lastUpdatedDateTime as a date and + //check if the updatedDate is the same as the lastUpdatedDateTime + return { hasChanged: chatflow.updatedDate.toISOString() !== lastUpdatedDateTime } + } catch (error) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: chatflowsService.checkIfChatflowHasChanged - ${getErrorMessage(error)}` + ) } } @@ -373,11 +476,14 @@ export default { checkIfChatflowIsValidForUploads, deleteChatflow, getAllChatflows, + getAllChatflowsCount, getChatflowByApiKey, getChatflowById, saveChatflow, importChatflows, updateChatflow, getSinglePublicChatflow, - getSinglePublicChatbotConfig + getSinglePublicChatbotConfig, + checkIfChatflowHasChanged, + getAllChatflowsCountByOrganization } diff --git a/packages/server/src/services/credentials/index.ts b/packages/server/src/services/credentials/index.ts index b92218072a3..7b3625d71cb 100644 --- a/packages/server/src/services/credentials/index.ts +++ b/packages/server/src/services/credentials/index.ts @@ -6,6 +6,9 @@ import { transformToCredentialEntity, decryptCredentialData } from '../../utils' import { ICredentialReturnResponse } from '../../Interface' import { InternalFlowiseError } from '../../errors/internalFlowiseError' import { getErrorMessage } from '../../errors/utils' +import { getWorkspaceSearchOptions } from '../../enterprise/utils/ControllerServiceUtils' +import { WorkspaceShared } from '../../enterprise/database/entities/EnterpriseEntities' +import { WorkspaceService } from '../../enterprise/services/workspace.service' const createCredential = async (requestBody: any) => { try { @@ -39,7 +42,7 @@ const deleteCredentials = async (credentialId: string): Promise => { } } -const getAllCredentials = async (paramCredentialName: any) => { +const getAllCredentials = async (paramCredentialName: any, workspaceId?: string) => { try { const appServer = getRunningExpressApp() let dbResponse = [] @@ -47,22 +50,65 @@ const getAllCredentials = async (paramCredentialName: any) => { if (Array.isArray(paramCredentialName)) { for (let i = 0; i < paramCredentialName.length; i += 1) { const name = paramCredentialName[i] as string - const credentials = await appServer.AppDataSource.getRepository(Credential).findBy({ - credentialName: name - }) + const searchOptions = { + credentialName: name, + ...getWorkspaceSearchOptions(workspaceId) + } + const credentials = await appServer.AppDataSource.getRepository(Credential).findBy(searchOptions) dbResponse.push(...credentials) } } else { - const credentials = await appServer.AppDataSource.getRepository(Credential).findBy({ - credentialName: paramCredentialName as string - }) + const searchOptions = { + credentialName: paramCredentialName, + ...getWorkspaceSearchOptions(workspaceId) + } + const credentials = await appServer.AppDataSource.getRepository(Credential).findBy(searchOptions) dbResponse = [...credentials] } + // get shared credentials + if (workspaceId) { 
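// Illustrative caller for checkIfChatflowHasChanged above (variable names invented): the client
// passes the updatedDate it last saw and only refetches when the chatflow is stale.
const { hasChanged } = await chatflowsService.checkIfChatflowHasChanged(chatflowId, lastSeenUpdatedDate)
if (hasChanged) {
    const latest = await chatflowsService.getChatflowById(chatflowId)
    // ...refresh the cached copy with `latest`
}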
+ const workspaceService = new WorkspaceService() + const sharedItems = (await workspaceService.getSharedItemsForWorkspace(workspaceId, 'credential')) as Credential[] + if (sharedItems.length) { + for (const sharedItem of sharedItems) { + // Check if paramCredentialName is array + if (Array.isArray(paramCredentialName)) { + for (let i = 0; i < paramCredentialName.length; i += 1) { + const name = paramCredentialName[i] as string + if (sharedItem.credentialName === name) { + // @ts-ignore + sharedItem.shared = true + dbResponse.push(sharedItem) + } + } + } else { + if (sharedItem.credentialName === paramCredentialName) { + // @ts-ignore + sharedItem.shared = true + dbResponse.push(sharedItem) + } + } + } + } + } } else { - const credentials = await appServer.AppDataSource.getRepository(Credential).find() + const credentials = await appServer.AppDataSource.getRepository(Credential).findBy(getWorkspaceSearchOptions(workspaceId)) for (const credential of credentials) { dbResponse.push(omit(credential, ['encryptedData'])) } + + // get shared credentials + if (workspaceId) { + const workspaceService = new WorkspaceService() + const sharedItems = (await workspaceService.getSharedItemsForWorkspace(workspaceId, 'credential')) as Credential[] + if (sharedItems.length) { + for (const sharedItem of sharedItems) { + // @ts-ignore + sharedItem.shared = true + dbResponse.push(sharedItem) + } + } + } } return dbResponse } catch (error) { @@ -73,7 +119,7 @@ const getAllCredentials = async (paramCredentialName: any) => { } } -const getCredentialById = async (credentialId: string): Promise => { +const getCredentialById = async (credentialId: string, workspaceId?: string): Promise => { try { const appServer = getRunningExpressApp() const credential = await appServer.AppDataSource.getRepository(Credential).findOneBy({ @@ -92,7 +138,19 @@ const getCredentialById = async (credentialId: string): Promise => { ...credential, plainDataObj: decryptedCredentialData } - const dbResponse = omit(returnCredential, ['encryptedData']) + const dbResponse: any = omit(returnCredential, ['encryptedData']) + if (workspaceId) { + const shared = await appServer.AppDataSource.getRepository(WorkspaceShared).count({ + where: { + workspaceId: workspaceId, + sharedItemId: credentialId, + itemType: 'credential' + } + }) + if (shared > 0) { + dbResponse.shared = true + } + } return dbResponse } catch (error) { throw new InternalFlowiseError( diff --git a/packages/server/src/services/dataset/index.ts b/packages/server/src/services/dataset/index.ts new file mode 100644 index 00000000000..4b4913226ba --- /dev/null +++ b/packages/server/src/services/dataset/index.ts @@ -0,0 +1,361 @@ +import { StatusCodes } from 'http-status-codes' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { getErrorMessage } from '../../errors/utils' +import { Dataset } from '../../database/entities/Dataset' +import { DatasetRow } from '../../database/entities/DatasetRow' +import { Readable } from 'stream' +import { In } from 'typeorm' + +import csv from 'csv-parser' +import { getWorkspaceSearchOptions } from '../../enterprise/utils/ControllerServiceUtils' + +const getAllDatasets = async (workspaceId?: string) => { + try { + const appServer = getRunningExpressApp() + const returnObj: Dataset[] = [] + const datasets = await appServer.AppDataSource.getRepository(Dataset).findBy(getWorkspaceSearchOptions(workspaceId)) + + // TODO: This is a hack to get the row count 
for each dataset. Need to find a better way to do this + for (const dataset of datasets) { + ;(dataset as any).rowCount = await appServer.AppDataSource.getRepository(DatasetRow).count({ + where: { datasetId: dataset.id } + }) + returnObj.push(dataset) + } + return returnObj + } catch (error) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: datasetService.getAllDatasets - ${getErrorMessage(error)}` + ) + } +} + +const getDataset = async (id: string) => { + try { + const appServer = getRunningExpressApp() + const dataset = await appServer.AppDataSource.getRepository(Dataset).findOneBy({ + id: id + }) + let items = await appServer.AppDataSource.getRepository(DatasetRow).find({ + where: { datasetId: id }, + order: { sequenceNo: 'asc' } + }) + // special case for sequence numbers == -1 (this happens when the update script is run and all rows are set to -1) + // check if there are any sequence numbers == -1, if so set them to the max sequence number + 1 + const missingSequenceNumbers = items.filter((item) => item.sequenceNo === -1) + if (missingSequenceNumbers.length > 0) { + const maxSequenceNumber = items.reduce((prev, current) => (prev.sequenceNo > current.sequenceNo ? prev : current)) + let sequenceNo = maxSequenceNumber.sequenceNo + 1 + for (const zeroSequenceNumber of missingSequenceNumbers) { + zeroSequenceNumber.sequenceNo = sequenceNo++ + } + await appServer.AppDataSource.getRepository(DatasetRow).save(missingSequenceNumbers) + // now get the items again + items = await appServer.AppDataSource.getRepository(DatasetRow).find({ + where: { datasetId: id }, + order: { sequenceNo: 'asc' } + }) + } + + return { + ...dataset, + rows: items + } + } catch (error) { + throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error: datasetService.getDataset - ${getErrorMessage(error)}`) + } +} + +const reorderDatasetRow = async (datasetId: string, rows: any[]) => { + try { + const appServer = getRunningExpressApp() + await appServer.AppDataSource.transaction(async (entityManager) => { + // rows are an array of { id: string, sequenceNo: number } + // update the sequence numbers in the DB + for (const row of rows) { + const item = await entityManager.getRepository(DatasetRow).findOneBy({ + id: row.id + }) + if (!item) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Dataset Row ${row.id} not found`) + item.sequenceNo = row.sequenceNo + await entityManager.getRepository(DatasetRow).save(item) + } + await changeUpdateOnDataset(datasetId, entityManager) + }) + return { message: 'Dataset row reordered successfully' } + } catch (error) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: datasetService.reorderDatasetRow - ${getErrorMessage(error)}` + ) + } +} + +const _readCSV = async (stream: Readable, results: any[]) => { + return new Promise((resolve, reject) => { + stream + .pipe( + csv({ + headers: false + }) + ) + .on('data', (data) => results.push(data)) + .on('end', () => { + resolve(results) + }) + .on('error', reject) + }) +} + +const _csvToDatasetRows = async (datasetId: string, csvString: string, firstRowHeaders: boolean) => { + try { + const appServer = getRunningExpressApp() + // get the max value first + const maxValueEntity = await appServer.AppDataSource.getRepository(DatasetRow).find({ + order: { + sequenceNo: 'DESC' + }, + take: 1 + }) + let sequenceNo = 0 + if (maxValueEntity && maxValueEntity.length > 0) { + sequenceNo = maxValueEntity[0].sequenceNo + } + sequenceNo++ + // Array to hold parsed records + 
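// Worked example for the sequenceNo backfill above (values invented): rows loaded in ascending
// order as [-1, -1, 2, 5] have a maximum sequenceNo of 5, so the two -1 rows are rewritten to
// 6 and 7 and saved; the subsequent re-read returns the rows ordered as [2, 5, 6, 7].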
const results: any[] = [] + let files: string[] = [] + + if (csvString.startsWith('[') && csvString.endsWith(']')) { + files = JSON.parse(csvString) + } else { + files = [csvString] + } + + for (const file of files) { + const splitDataURI = file.split(',') + splitDataURI.pop() + const bf = Buffer.from(splitDataURI.pop() || '', 'base64') + const csvString = bf.toString('utf8') + + // Convert CSV string to a Readable stream + const stream = Readable.from(csvString) + const rows: any[] = [] + await _readCSV(stream, rows) + results.push(...rows) + } + if (results && results?.length > 0) { + for (let r = 0; r < results.length; r++) { + const row = results[r] + let input = '' + let output = '' + if (firstRowHeaders && r === 0) { + continue + } + input = row['0'] + output = row['1'] + const newRow = appServer.AppDataSource.getRepository(DatasetRow).create(new DatasetRow()) + newRow.datasetId = datasetId + newRow.input = input + newRow.output = output + newRow.sequenceNo = sequenceNo + await appServer.AppDataSource.getRepository(DatasetRow).save(newRow) + sequenceNo++ + } + } + } catch (error) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: datasetService._csvToDatasetRows - ${getErrorMessage(error)}` + ) + } +} + +// Create new dataset +const createDataset = async (body: any) => { + try { + const appServer = getRunningExpressApp() + const newDs = new Dataset() + Object.assign(newDs, body) + const dataset = appServer.AppDataSource.getRepository(Dataset).create(newDs) + const result = await appServer.AppDataSource.getRepository(Dataset).save(dataset) + if (body.csvFile) { + await _csvToDatasetRows(result.id, body.csvFile, body.firstRowHeaders) + } + return result + } catch (error) { + throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error: datasetService.createDataset - ${getErrorMessage(error)}`) + } +} + +// Update dataset +const updateDataset = async (id: string, body: any) => { + try { + const appServer = getRunningExpressApp() + const dataset = await appServer.AppDataSource.getRepository(Dataset).findOneBy({ + id: id + }) + if (!dataset) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Dataset ${id} not found`) + + const updateDataset = new Dataset() + Object.assign(updateDataset, body) + appServer.AppDataSource.getRepository(Dataset).merge(dataset, updateDataset) + const result = await appServer.AppDataSource.getRepository(Dataset).save(dataset) + return result + } catch (error) { + throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error: datasetService.updateDataset - ${getErrorMessage(error)}`) + } +} + +// Delete dataset via id +const deleteDataset = async (id: string) => { + try { + const appServer = getRunningExpressApp() + const result = await appServer.AppDataSource.getRepository(Dataset).delete({ id: id }) + + // delete all rows for this dataset + await appServer.AppDataSource.getRepository(DatasetRow).delete({ datasetId: id }) + + return result + } catch (error) { + throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error: datasetService.deleteDataset - ${getErrorMessage(error)}`) + } +} + +// Create new row in a given dataset +const addDatasetRow = async (body: any) => { + try { + const appServer = getRunningExpressApp() + if (body.csvFile) { + await _csvToDatasetRows(body.datasetId, body.csvFile, body.firstRowHeaders) + await changeUpdateOnDataset(body.datasetId) + return { message: 'Dataset rows added successfully' } + } else { + // get the max value first + const maxValueEntity = await 
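// Illustrative sketch of the csvFile value createDataset/addDatasetRow accept above: a data URI
// whose trailing ',filename:<name>' segment is popped off before the base64 payload is decoded,
// and whose rows map column 0 -> input and column 1 -> output. The dataset name field and the
// sample rows are invented for this sketch.
const csv = 'question,answer\nWhat is Flowise?,A drag-and-drop LLM flow builder'
const body = {
    name: 'qa-dataset',
    firstRowHeaders: true, // the first row above is a header, so it is skipped
    csvFile: 'data:text/csv;base64,' + Buffer.from(csv).toString('base64') + ',filename:qa.csv'
}
// e.g. const dataset = await datasetService.createDataset(body)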
appServer.AppDataSource.getRepository(DatasetRow).find({ + where: { + datasetId: body.datasetId + }, + order: { + sequenceNo: 'DESC' + }, + take: 1 + }) + let sequenceNo = 0 + if (maxValueEntity && maxValueEntity.length > 0) { + sequenceNo = maxValueEntity[0].sequenceNo + } + const newDs = new DatasetRow() + Object.assign(newDs, body) + newDs.sequenceNo = sequenceNo === 0 ? sequenceNo : sequenceNo + 1 + const row = appServer.AppDataSource.getRepository(DatasetRow).create(newDs) + const result = await appServer.AppDataSource.getRepository(DatasetRow).save(row) + await changeUpdateOnDataset(body.datasetId) + return result + } + } catch (error) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: datasetService.createDatasetRow - ${getErrorMessage(error)}` + ) + } +} + +const changeUpdateOnDataset = async (id: string, entityManager?: any) => { + const appServer = getRunningExpressApp() + const dataset = await appServer.AppDataSource.getRepository(Dataset).findOneBy({ + id: id + }) + if (!dataset) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Dataset ${id} not found`) + + dataset.updatedDate = new Date() + if (entityManager) { + await entityManager.getRepository(Dataset).save(dataset) + } else { + await appServer.AppDataSource.getRepository(Dataset).save(dataset) + } +} + +// Update row for a dataset +const updateDatasetRow = async (id: string, body: any) => { + try { + const appServer = getRunningExpressApp() + const item = await appServer.AppDataSource.getRepository(DatasetRow).findOneBy({ + id: id + }) + if (!item) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Dataset Row ${id} not found`) + + const updateItem = new DatasetRow() + Object.assign(updateItem, body) + appServer.AppDataSource.getRepository(DatasetRow).merge(item, updateItem) + const result = await appServer.AppDataSource.getRepository(DatasetRow).save(item) + await changeUpdateOnDataset(body.datasetId) + return result + } catch (error) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: datasetService.updateDatasetRow - ${getErrorMessage(error)}` + ) + } +} + +// Delete dataset row via id +const deleteDatasetRow = async (id: string) => { + try { + const appServer = getRunningExpressApp() + return await appServer.AppDataSource.transaction(async (entityManager) => { + const item = await entityManager.getRepository(DatasetRow).findOneBy({ + id: id + }) + if (!item) throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Dataset Row ${id} not found`) + + const result = await entityManager.getRepository(DatasetRow).delete({ id: id }) + await changeUpdateOnDataset(item.datasetId, entityManager) + return result + }) + } catch (error) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: datasetService.deleteDatasetRow - ${getErrorMessage(error)}` + ) + } +} + +// Delete dataset rows via ids +const patchDeleteRows = async (ids: string[] = []) => { + try { + const appServer = getRunningExpressApp() + const datasetItemsToBeDeleted = await appServer.AppDataSource.getRepository(DatasetRow).find({ + where: { + id: In(ids) + } + }) + const dbResponse = await appServer.AppDataSource.getRepository(DatasetRow).delete(ids) + + const datasetIds = [...new Set(datasetItemsToBeDeleted.map((item) => item.datasetId))] + for (const datasetId of datasetIds) { + await changeUpdateOnDataset(datasetId) + } + return dbResponse + } catch (error) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: 
datasetService.patchDeleteRows - ${getErrorMessage(error)}` + ) + } +} + +export default { + getAllDatasets, + getDataset, + createDataset, + updateDataset, + deleteDataset, + addDatasetRow, + updateDatasetRow, + deleteDatasetRow, + patchDeleteRows, + reorderDatasetRow +} diff --git a/packages/server/src/services/documentstore/index.ts b/packages/server/src/services/documentstore/index.ts index adea69baed6..e9e203d29c4 100644 --- a/packages/server/src/services/documentstore/index.ts +++ b/packages/server/src/services/documentstore/index.ts @@ -1,6 +1,4 @@ -import { getRunningExpressApp } from '../../utils/getRunningExpressApp' -import { DocumentStore } from '../../database/entities/DocumentStore' -import * as path from 'path' +import { Document } from '@langchain/core/documents' import { addArrayFilesToStorage, addSingleFileToStorage, @@ -14,9 +12,15 @@ import { removeSpecificFileFromStorage, removeSpecificFileFromUpload } from 'flowise-components' +import { StatusCodes } from 'http-status-codes' +import { cloneDeep, omit } from 'lodash' +import * as path from 'path' +import { DataSource, In } from 'typeorm' +import { v4 as uuidv4 } from 'uuid' import { addLoaderSource, ChatType, + DocumentStoreDTO, DocumentStoreStatus, IComponentNodes, IDocumentStoreFileChunkPagedResponse, @@ -27,38 +31,43 @@ import { IDocumentStoreUpsertData, IDocumentStoreWhereUsed, IExecuteDocStoreUpsert, + IExecutePreviewLoader, IExecuteProcessLoader, IExecuteVectorStoreInsert, INodeData, - MODE, IOverrideConfig, - IExecutePreviewLoader, - DocumentStoreDTO + MODE } from '../../Interface' +import { UsageCacheManager } from '../../UsageCacheManager' +import { ChatFlow } from '../../database/entities/ChatFlow' +import { DocumentStore } from '../../database/entities/DocumentStore' import { DocumentStoreFileChunk } from '../../database/entities/DocumentStoreFileChunk' -import { v4 as uuidv4 } from 'uuid' -import { databaseEntities, getAppVersion, saveUpsertFlowData } from '../../utils' -import logger from '../../utils/logger' -import nodesService from '../nodes' +import { UpsertHistory } from '../../database/entities/UpsertHistory' +import { getWorkspaceSearchOptions } from '../../enterprise/utils/ControllerServiceUtils' import { InternalFlowiseError } from '../../errors/internalFlowiseError' -import { StatusCodes } from 'http-status-codes' import { getErrorMessage } from '../../errors/utils' -import { ChatFlow } from '../../database/entities/ChatFlow' -import { Document } from '@langchain/core/documents' -import { UpsertHistory } from '../../database/entities/UpsertHistory' -import { cloneDeep, omit } from 'lodash' +import { databaseEntities, getAppVersion, saveUpsertFlowData } from '../../utils' +import { DOCUMENT_STORE_BASE_FOLDER, INPUT_PARAMS_TYPE, OMIT_QUEUE_JOB_DATA } from '../../utils/constants' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import logger from '../../utils/logger' import { DOCUMENTSTORE_TOOL_DESCRIPTION_PROMPT_GENERATOR } from '../../utils/prompt' -import { DataSource } from 'typeorm' +import { checkStorage, updateStorageUsage } from '../../utils/quotaUsage' import { Telemetry } from '../../utils/telemetry' -import { INPUT_PARAMS_TYPE, OMIT_QUEUE_JOB_DATA } from '../../utils/constants' - -const DOCUMENT_STORE_BASE_FOLDER = 'docustore' +import nodesService from '../nodes' -const createDocumentStore = async (newDocumentStore: DocumentStore) => { +const createDocumentStore = async (newDocumentStore: DocumentStore, orgId: string) => { try { const appServer = 
getRunningExpressApp() + const documentStore = appServer.AppDataSource.getRepository(DocumentStore).create(newDocumentStore) const dbResponse = await appServer.AppDataSource.getRepository(DocumentStore).save(documentStore) + await appServer.telemetry.sendTelemetry( + 'document_store_created', + { + version: await getAppVersion() + }, + orgId + ) return dbResponse } catch (error) { throw new InternalFlowiseError( @@ -68,10 +77,10 @@ const createDocumentStore = async (newDocumentStore: DocumentStore) => { } } -const getAllDocumentStores = async () => { +const getAllDocumentStores = async (workspaceId?: string) => { try { const appServer = getRunningExpressApp() - const entities = await appServer.AppDataSource.getRepository(DocumentStore).find() + const entities = await appServer.AppDataSource.getRepository(DocumentStore).findBy(getWorkspaceSearchOptions(workspaceId)) return entities } catch (error) { throw new InternalFlowiseError( @@ -81,22 +90,21 @@ const getAllDocumentStores = async () => { } } -const getAllDocumentFileChunks = async () => { - try { - const appServer = getRunningExpressApp() - const entities = await appServer.AppDataSource.getRepository(DocumentStoreFileChunk).find() - return entities - } catch (error) { - throw new InternalFlowiseError( - StatusCodes.INTERNAL_SERVER_ERROR, - `Error: documentStoreServices.getAllDocumentFileChunks - ${getErrorMessage(error)}` - ) - } +const getAllDocumentFileChunksByDocumentStoreIds = async (documentStoreIds: string[]) => { + const appServer = getRunningExpressApp() + return await appServer.AppDataSource.getRepository(DocumentStoreFileChunk).find({ where: { storeId: In(documentStoreIds) } }) } -const deleteLoaderFromDocumentStore = async (storeId: string, docId: string) => { +const deleteLoaderFromDocumentStore = async ( + storeId: string, + docId: string, + orgId: string, + workspaceId: string, + usageCacheManager: UsageCacheManager +) => { try { const appServer = getRunningExpressApp() + const entity = await appServer.AppDataSource.getRepository(DocumentStore).findOneBy({ id: storeId }) @@ -106,6 +114,13 @@ const deleteLoaderFromDocumentStore = async (storeId: string, docId: string) => `Error: documentStoreServices.deleteLoaderFromDocumentStore - Document store ${storeId} not found` ) } + + if (workspaceId) { + if (entity?.workspaceId !== workspaceId) { + throw new Error('Unauthorized access') + } + } + const existingLoaders = JSON.parse(entity.loaders) const found = existingLoaders.find((loader: IDocumentStoreLoader) => loader.id === docId) if (found) { @@ -113,7 +128,8 @@ const deleteLoaderFromDocumentStore = async (storeId: string, docId: string) => for (const file of found.files) { if (file.name) { try { - await removeSpecificFileFromStorage(DOCUMENT_STORE_BASE_FOLDER, storeId, file.name) + const { totalSize } = await removeSpecificFileFromStorage(orgId, DOCUMENT_STORE_BASE_FOLDER, storeId, file.name) + await updateStorageUsage(orgId, workspaceId, totalSize, usageCacheManager) } catch (error) { console.error(error) } @@ -259,6 +275,7 @@ const getDocumentStoreFileChunks = async (appDataSource: DataSource, storeId: st currentPage: pageNo, storeName: entity.name, description: entity.description, + workspaceId: entity.workspaceId, docId: docId, characters } @@ -271,9 +288,10 @@ const getDocumentStoreFileChunks = async (appDataSource: DataSource, storeId: st } } -const deleteDocumentStore = async (storeId: string) => { +const deleteDocumentStore = async (storeId: string, orgId: string, workspaceId: string, usageCacheManager: 
UsageCacheManager) => { try { const appServer = getRunningExpressApp() + // delete all the chunks associated with the store await appServer.AppDataSource.getRepository(DocumentStoreFileChunk).delete({ storeId: storeId @@ -285,7 +303,19 @@ const deleteDocumentStore = async (storeId: string) => { if (!entity) { throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Document store ${storeId} not found`) } - await removeFilesFromStorage(DOCUMENT_STORE_BASE_FOLDER, entity.id) + + if (workspaceId) { + if (entity?.workspaceId !== workspaceId) { + throw new Error('Unauthorized access') + } + } + + try { + const { totalSize } = await removeFilesFromStorage(orgId, DOCUMENT_STORE_BASE_FOLDER, entity.id) + await updateStorageUsage(orgId, workspaceId, totalSize, usageCacheManager) + } catch (error) { + logger.error(`[server]: Error deleting file storage for documentStore ${storeId}`) + } // delete upsert history await appServer.AppDataSource.getRepository(UpsertHistory).delete({ @@ -471,7 +501,16 @@ const updateDocumentStore = async (documentStore: DocumentStore, updatedDocument } } -const _saveFileToStorage = async (fileBase64: string, entity: DocumentStore) => { +const _saveFileToStorage = async ( + fileBase64: string, + entity: DocumentStore, + orgId: string, + workspaceId: string, + subscriptionId: string, + usageCacheManager: UsageCacheManager +) => { + await checkStorage(orgId, subscriptionId, usageCacheManager) + const splitDataURI = fileBase64.split(',') const filename = splitDataURI.pop()?.split(':')[1] ?? '' const bf = Buffer.from(splitDataURI.pop() || '', 'base64') @@ -480,7 +519,9 @@ const _saveFileToStorage = async (fileBase64: string, entity: DocumentStore) => if (mimePrefix) { mime = mimePrefix.split(';')[0].split(':')[1] } - await addSingleFileToStorage(mime, bf, filename, DOCUMENT_STORE_BASE_FOLDER, entity.id) + const { totalSize } = await addSingleFileToStorage(mime, bf, filename, orgId, DOCUMENT_STORE_BASE_FOLDER, entity.id) + await updateStorageUsage(orgId, workspaceId, totalSize, usageCacheManager) + return { id: uuidv4(), name: filename, @@ -530,7 +571,12 @@ const _splitIntoChunks = async (appDataSource: DataSource, componentNodes: IComp } } -const _normalizeFilePaths = async (appDataSource: DataSource, data: IDocumentStoreLoaderForPreview, entity: DocumentStore | null) => { +const _normalizeFilePaths = async ( + appDataSource: DataSource, + data: IDocumentStoreLoaderForPreview, + entity: DocumentStore | null, + orgId: string +) => { const keys = Object.getOwnPropertyNames(data.loaderConfig) let rehydrated = false for (let i = 0; i < keys.length; i++) { @@ -563,7 +609,7 @@ const _normalizeFilePaths = async (appDataSource: DataSource, data: IDocumentSto if (currentLoader) { const base64Files: string[] = [] for (const file of files) { - const bf = await getFileFromStorage(file, DOCUMENT_STORE_BASE_FOLDER, documentStoreEntity.id) + const bf = await getFileFromStorage(file, orgId, DOCUMENT_STORE_BASE_FOLDER, documentStoreEntity.id) // find the file entry that has the same name as the file const uploadedFile = currentLoader.files.find((uFile: IDocumentStoreLoaderFile) => uFile.name === file) const mimePrefix = 'data:' + uploadedFile.mimePrefix + ';base64' @@ -578,7 +624,13 @@ const _normalizeFilePaths = async (appDataSource: DataSource, data: IDocumentSto data.rehydrated = rehydrated } -const previewChunksMiddleware = async (data: IDocumentStoreLoaderForPreview) => { +const previewChunksMiddleware = async ( + data: IDocumentStoreLoaderForPreview, + orgId: string, + workspaceId: string, 
+ subscriptionId: string, + usageCacheManager: UsageCacheManager +) => { try { const appServer = getRunningExpressApp() const appDataSource = appServer.AppDataSource @@ -587,14 +639,18 @@ const previewChunksMiddleware = async (data: IDocumentStoreLoaderForPreview) => const executeData: IExecutePreviewLoader = { appDataSource, componentNodes, + usageCacheManager, data, - isPreviewOnly: true + isPreviewOnly: true, + orgId, + workspaceId, + subscriptionId } if (process.env.MODE === MODE.QUEUE) { const upsertQueue = appServer.queueManager.getQueue('upsert') const job = await upsertQueue.addJob(omit(executeData, OMIT_QUEUE_JOB_DATA)) - logger.debug(`[server]: Job added to queue: ${job.id}`) + logger.debug(`[server]: [${orgId}]: Job added to queue: ${job.id}`) const queueEvents = upsertQueue.getQueueEvents() const result = await job.waitUntilFinished(queueEvents) @@ -614,7 +670,7 @@ const previewChunksMiddleware = async (data: IDocumentStoreLoaderForPreview) => } } -export const previewChunks = async ({ appDataSource, componentNodes, data }: IExecutePreviewLoader) => { +export const previewChunks = async ({ appDataSource, componentNodes, data, orgId }: IExecutePreviewLoader) => { try { if (data.preview) { if ( @@ -626,7 +682,7 @@ export const previewChunks = async ({ appDataSource, componentNodes, data }: IEx } } if (!data.rehydrated) { - await _normalizeFilePaths(appDataSource, data, null) + await _normalizeFilePaths(appDataSource, data, null, orgId) } let docs = await _splitIntoChunks(appDataSource, componentNodes, data) const totalChunks = docs.length @@ -726,7 +782,16 @@ const saveProcessingLoader = async (appDataSource: DataSource, data: IDocumentSt } } -export const processLoader = async ({ appDataSource, componentNodes, data, docLoaderId }: IExecuteProcessLoader) => { +export const processLoader = async ({ + appDataSource, + componentNodes, + data, + docLoaderId, + orgId, + workspaceId, + subscriptionId, + usageCacheManager +}: IExecuteProcessLoader) => { const entity = await appDataSource.getRepository(DocumentStore).findOneBy({ id: data.storeId }) @@ -736,11 +801,34 @@ export const processLoader = async ({ appDataSource, componentNodes, data, docLo `Error: documentStoreServices.processLoader - Document store ${data.storeId} not found` ) } - await _saveChunksToStorage(appDataSource, componentNodes, data, entity, docLoaderId) + if (workspaceId) { + if (entity?.workspaceId !== workspaceId) { + throw new Error('Unauthorized access') + } + } + await _saveChunksToStorage( + appDataSource, + componentNodes, + data, + entity, + docLoaderId, + orgId, + workspaceId, + subscriptionId, + usageCacheManager + ) return getDocumentStoreFileChunks(appDataSource, data.storeId as string, docLoaderId) } -const processLoaderMiddleware = async (data: IDocumentStoreLoaderForPreview, docLoaderId: string, isInternalRequest = false) => { +const processLoaderMiddleware = async ( + data: IDocumentStoreLoaderForPreview, + docLoaderId: string, + orgId: string, + workspaceId: string, + subscriptionId: string, + usageCacheManager: UsageCacheManager, + isInternalRequest = false +) => { try { const appServer = getRunningExpressApp() const appDataSource = appServer.AppDataSource @@ -753,13 +841,17 @@ const processLoaderMiddleware = async (data: IDocumentStoreLoaderForPreview, doc data, docLoaderId, isProcessWithoutUpsert: true, - telemetry + telemetry, + orgId, + workspaceId, + subscriptionId, + usageCacheManager } if (process.env.MODE === MODE.QUEUE) { const upsertQueue = appServer.queueManager.getQueue('upsert') const 
job = await upsertQueue.addJob(omit(executeData, OMIT_QUEUE_JOB_DATA)) - logger.debug(`[server]: Job added to queue: ${job.id}`) + logger.debug(`[server]: [${orgId}]: Job added to queue: ${job.id}`) if (isInternalRequest) { return { @@ -790,20 +882,28 @@ const _saveChunksToStorage = async ( componentNodes: IComponentNodes, data: IDocumentStoreLoaderForPreview, entity: DocumentStore, - newLoaderId: string + newLoaderId: string, + orgId: string, + workspaceId: string, + subscriptionId: string, + usageCacheManager: UsageCacheManager ) => { const re = new RegExp('^data.*;base64', 'i') try { //step 1: restore the full paths, if any - await _normalizeFilePaths(appDataSource, data, entity) + await _normalizeFilePaths(appDataSource, data, entity, orgId) //step 2: split the file into chunks const response = await previewChunks({ appDataSource, componentNodes, data, - isPreviewOnly: false + isPreviewOnly: false, + orgId, + workspaceId, + subscriptionId, + usageCacheManager }) //step 3: remove all files associated with the loader @@ -817,7 +917,13 @@ const _saveChunksToStorage = async ( if (loader.files) { loader.files.map(async (file: IDocumentStoreLoaderFile) => { try { - await removeSpecificFileFromStorage(DOCUMENT_STORE_BASE_FOLDER, entity.id, file.name) + const { totalSize } = await removeSpecificFileFromStorage( + orgId, + DOCUMENT_STORE_BASE_FOLDER, + entity.id, + file.name + ) + await updateStorageUsage(orgId, workspaceId, totalSize, usageCacheManager) } catch (error) { console.error(error) } @@ -832,6 +938,7 @@ const _saveChunksToStorage = async ( const keys = Object.getOwnPropertyNames(data.loaderConfig) for (let i = 0; i < keys.length; i++) { const input = data.loaderConfig[keys[i]] + if (!input) { continue } @@ -844,15 +951,15 @@ const _saveChunksToStorage = async ( for (let j = 0; j < files.length; j++) { const file = files[j] if (re.test(file)) { - const fileMetadata = await _saveFileToStorage(file, entity) + const fileMetadata = await _saveFileToStorage(file, entity, orgId, workspaceId, subscriptionId, usageCacheManager) fileNames.push(fileMetadata.name) filesWithMetadata.push(fileMetadata) } } - if (fileNames.length) data.loaderConfig[keys[i]] = 'FILE-STORAGE::' + JSON.stringify(fileNames) + data.loaderConfig[keys[i]] = 'FILE-STORAGE::' + JSON.stringify(fileNames) } else if (re.test(input)) { const fileNames: string[] = [] - const fileMetadata = await _saveFileToStorage(input, entity) + const fileMetadata = await _saveFileToStorage(input, entity, orgId, workspaceId, subscriptionId, usageCacheManager) fileNames.push(fileMetadata.name) filesWithMetadata.push(fileMetadata) data.loaderConfig[keys[i]] = 'FILE-STORAGE::' + JSON.stringify(fileNames) @@ -881,18 +988,27 @@ const _saveChunksToStorage = async ( } return acc }, 0) - response.chunks.map(async (chunk: IDocument, index: number) => { - const docChunk: DocumentStoreFileChunk = { - docId: newLoaderId, - storeId: data.storeId || '', - id: uuidv4(), - chunkNo: index + 1, - pageContent: chunk.pageContent, - metadata: JSON.stringify(chunk.metadata) - } - const dChunk = appDataSource.getRepository(DocumentStoreFileChunk).create(docChunk) - await appDataSource.getRepository(DocumentStoreFileChunk).save(dChunk) - }) + await Promise.all( + response.chunks.map(async (chunk: IDocument, index: number) => { + try { + const docChunk: DocumentStoreFileChunk = { + docId: newLoaderId, + storeId: data.storeId || '', + id: uuidv4(), + chunkNo: index + 1, + pageContent: sanitizeChunkContent(chunk.pageContent), + metadata: JSON.stringify(chunk.metadata) + 
} + const dChunk = appDataSource.getRepository(DocumentStoreFileChunk).create(docChunk) + await appDataSource.getRepository(DocumentStoreFileChunk).save(dChunk) + } catch (chunkError) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: documentStoreServices._saveChunksToStorage - ${getErrorMessage(chunkError)}` + ) + } + }) + ) // update the loader with the new metrics loader.totalChunks = response.totalChunks loader.totalChars = totalChars @@ -915,6 +1031,12 @@ const _saveChunksToStorage = async ( } } +// remove null bytes from chunk content +const sanitizeChunkContent = (content: string) => { + // eslint-disable-next-line no-control-regex + return content.replaceAll(/\u0000/g, '') +} + // Get all component nodes const getDocumentLoaders = async () => { const removeDocumentLoadersWithName = ['documentStore', 'vectorStoreToDocument', 'unstructuredFolderLoader', 'folderFiles'] @@ -930,12 +1052,12 @@ const getDocumentLoaders = async () => { } } -const updateDocumentStoreUsage = async (chatId: string, storeId: string | undefined) => { +const updateDocumentStoreUsage = async (chatId: string, storeId: string | undefined, workspaceId?: string) => { try { // find the document store const appServer = getRunningExpressApp() // find all entities that have the chatId in their whereUsed - const entities = await appServer.AppDataSource.getRepository(DocumentStore).find() + const entities = await appServer.AppDataSource.getRepository(DocumentStore).findBy(getWorkspaceSearchOptions(workspaceId)) entities.map(async (entity: DocumentStore) => { const whereUsed = JSON.parse(entity.whereUsed) const found = whereUsed.find((w: string) => w === chatId) @@ -1071,14 +1193,15 @@ export const insertIntoVectorStore = async ({ componentNodes, telemetry, data, - isStrictSave + isStrictSave, + orgId }: IExecuteVectorStoreInsert) => { try { const entity = await saveVectorStoreConfig(appDataSource, data, isStrictSave) entity.status = DocumentStoreStatus.UPSERTING await appDataSource.getRepository(DocumentStore).save(entity) - const indexResult = await _insertIntoVectorStoreWorkerThread(appDataSource, componentNodes, telemetry, data, isStrictSave) + const indexResult = await _insertIntoVectorStoreWorkerThread(appDataSource, componentNodes, telemetry, data, isStrictSave, orgId) return indexResult } catch (error) { throw new InternalFlowiseError( @@ -1088,7 +1211,14 @@ export const insertIntoVectorStore = async ({ } } -const insertIntoVectorStoreMiddleware = async (data: ICommonObject, isStrictSave = true) => { +const insertIntoVectorStoreMiddleware = async ( + data: ICommonObject, + isStrictSave = true, + orgId: string, + workspaceId: string, + subscriptionId: string, + usageCacheManager: UsageCacheManager +) => { try { const appServer = getRunningExpressApp() const appDataSource = appServer.AppDataSource @@ -1101,13 +1231,17 @@ const insertIntoVectorStoreMiddleware = async (data: ICommonObject, isStrictSave telemetry, data, isStrictSave, - isVectorStoreInsert: true + isVectorStoreInsert: true, + orgId, + workspaceId, + subscriptionId, + usageCacheManager } if (process.env.MODE === MODE.QUEUE) { const upsertQueue = appServer.queueManager.getQueue('upsert') const job = await upsertQueue.addJob(omit(executeData, OMIT_QUEUE_JOB_DATA)) - logger.debug(`[server]: Job added to queue: ${job.id}`) + logger.debug(`[server]: [${orgId}]: Job added to queue: ${job.id}`) const queueEvents = upsertQueue.getQueueEvents() const result = await job.waitUntilFinished(queueEvents) @@ -1132,7 +1266,8 @@ const 
_insertIntoVectorStoreWorkerThread = async ( componentNodes: IComponentNodes, telemetry: Telemetry, data: ICommonObject, - isStrictSave = true + isStrictSave = true, + orgId: string ) => { try { const entity = await saveVectorStoreConfig(appDataSource, data, isStrictSave) @@ -1193,12 +1328,16 @@ const _insertIntoVectorStoreWorkerThread = async ( await appDataSource.getRepository(UpsertHistory).save(upsertHistoryItem) } - await telemetry.sendTelemetry('vector_upserted', { - version: await getAppVersion(), - chatlowId: chatflowid, - type: ChatType.INTERNAL, - flowGraph: omit(indexResult['result'], ['totalKeys', 'addedDocs']) - }) + await telemetry.sendTelemetry( + 'vector_upserted', + { + version: await getAppVersion(), + chatlowId: chatflowid, + type: ChatType.INTERNAL, + flowGraph: omit(indexResult['result'], ['totalKeys', 'addedDocs']) + }, + orgId + ) entity.status = DocumentStoreStatus.UPSERTED await appDataSource.getRepository(DocumentStore).save(entity) @@ -1459,7 +1598,11 @@ const upsertDocStore = async ( storeId: string, data: IDocumentStoreUpsertData, files: Express.Multer.File[] = [], - isRefreshExisting = false + isRefreshExisting = false, + orgId: string, + workspaceId: string, + subscriptionId: string, + usageCacheManager: UsageCacheManager ) => { const docId = data.docId let metadata = {} @@ -1506,6 +1649,13 @@ const upsertDocStore = async ( if (!entity) { throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Document store ${storeId} not found`) } + + if (workspaceId) { + if (entity?.workspaceId !== workspaceId) { + throw new Error('Unauthorized access') + } + } + const loaders = JSON.parse(entity.loaders) const loader = loaders.find((ldr: IDocumentStoreLoader) => ldr.id === docId) if (!loader) { @@ -1593,7 +1743,17 @@ const upsertDocStore = async ( file.originalname = Buffer.from(file.originalname, 'latin1').toString('utf8') try { - await addArrayFilesToStorage(file.mimetype, fileBuffer, file.originalname, fileNames, DOCUMENT_STORE_BASE_FOLDER, storeId) + checkStorage(orgId, subscriptionId, usageCacheManager) + const { totalSize } = await addArrayFilesToStorage( + file.mimetype, + fileBuffer, + file.originalname, + fileNames, + orgId, + DOCUMENT_STORE_BASE_FOLDER, + storeId + ) + await updateStorageUsage(orgId, workspaceId, totalSize, usageCacheManager) } catch (error) { continue } @@ -1680,7 +1840,11 @@ const upsertDocStore = async ( data: processData, docLoaderId: newLoader.id || '', isProcessWithoutUpsert: false, - telemetry + telemetry, + orgId, + workspaceId, + subscriptionId, + usageCacheManager }) const newDocId = result.docId @@ -1701,10 +1865,13 @@ const upsertDocStore = async ( telemetry, data: insertData, isStrictSave: false, - isVectorStoreInsert: true + isVectorStoreInsert: true, + orgId, + workspaceId, + subscriptionId, + usageCacheManager }) res.docId = newDocId - if (createNewDocStore) res.storeId = storeId return res } catch (error) { @@ -1722,17 +1889,41 @@ export const executeDocStoreUpsert = async ({ storeId, totalItems, files, - isRefreshAPI + isRefreshAPI, + orgId, + workspaceId, + subscriptionId, + usageCacheManager }: IExecuteDocStoreUpsert) => { const results = [] for (const item of totalItems) { - const res = await upsertDocStore(appDataSource, componentNodes, telemetry, storeId, item, files, isRefreshAPI) + const res = await upsertDocStore( + appDataSource, + componentNodes, + telemetry, + storeId, + item, + files, + isRefreshAPI, + orgId, + workspaceId, + subscriptionId, + usageCacheManager + ) results.push(res) } return isRefreshAPI ? 
results : results[0] } -const upsertDocStoreMiddleware = async (storeId: string, data: IDocumentStoreUpsertData, files: Express.Multer.File[] = []) => { +const upsertDocStoreMiddleware = async ( + storeId: string, + data: IDocumentStoreUpsertData, + files: Express.Multer.File[] = [], + orgId: string, + workspaceId: string, + subscriptionId: string, + usageCacheManager: UsageCacheManager +) => { const appServer = getRunningExpressApp() const componentNodes = appServer.nodesPool.componentNodes const appDataSource = appServer.AppDataSource @@ -1746,13 +1937,17 @@ const upsertDocStoreMiddleware = async (storeId: string, data: IDocumentStoreUps storeId, totalItems: [data], files, - isRefreshAPI: false + isRefreshAPI: false, + orgId, + workspaceId, + subscriptionId, + usageCacheManager } if (process.env.MODE === MODE.QUEUE) { const upsertQueue = appServer.queueManager.getQueue('upsert') const job = await upsertQueue.addJob(omit(executeData, OMIT_QUEUE_JOB_DATA)) - logger.debug(`[server]: Job added to queue: ${job.id}`) + logger.debug(`[server]: [${orgId}]: Job added to queue: ${job.id}`) const queueEvents = upsertQueue.getQueueEvents() const result = await job.waitUntilFinished(queueEvents) @@ -1772,7 +1967,14 @@ const upsertDocStoreMiddleware = async (storeId: string, data: IDocumentStoreUps } } -const refreshDocStoreMiddleware = async (storeId: string, data?: IDocumentStoreRefreshData) => { +const refreshDocStoreMiddleware = async ( + storeId: string, + data: IDocumentStoreRefreshData, + orgId: string, + workspaceId: string, + subscriptionId: string, + usageCacheManager: UsageCacheManager +) => { const appServer = getRunningExpressApp() const componentNodes = appServer.nodesPool.componentNodes const appDataSource = appServer.AppDataSource @@ -1787,6 +1989,12 @@ const refreshDocStoreMiddleware = async (storeId: string, data?: IDocumentStoreR throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Document store ${storeId} not found`) } + if (workspaceId) { + if (entity?.workspaceId !== workspaceId) { + throw new Error('Unauthorized access') + } + } + const loaders = JSON.parse(entity.loaders) totalItems = loaders.map((ldr: IDocumentStoreLoader) => { return { @@ -1804,13 +2012,17 @@ const refreshDocStoreMiddleware = async (storeId: string, data?: IDocumentStoreR storeId, totalItems, files: [], - isRefreshAPI: true + isRefreshAPI: true, + orgId, + workspaceId, + subscriptionId, + usageCacheManager } if (process.env.MODE === MODE.QUEUE) { const upsertQueue = appServer.queueManager.getQueue('upsert') const job = await upsertQueue.addJob(omit(executeData, OMIT_QUEUE_JOB_DATA)) - logger.debug(`[server]: Job added to queue: ${job.id}`) + logger.debug(`[server]: [${orgId}]: Job added to queue: ${job.id}`) const queueEvents = upsertQueue.getQueueEvents() const result = await job.waitUntilFinished(queueEvents) @@ -2033,7 +2245,7 @@ export default { createDocumentStore, deleteLoaderFromDocumentStore, getAllDocumentStores, - getAllDocumentFileChunks, + getAllDocumentFileChunksByDocumentStoreIds, getDocumentStoreById, getUsedChatflowNames, getDocumentStoreFileChunks, diff --git a/packages/server/src/services/evaluations/CostCalculator.ts b/packages/server/src/services/evaluations/CostCalculator.ts new file mode 100644 index 00000000000..8813ade958d --- /dev/null +++ b/packages/server/src/services/evaluations/CostCalculator.ts @@ -0,0 +1,60 @@ +import { ICommonObject } from 'flowise-components' + +// fractionDigits is the number of digits after the decimal point, for display purposes +const fractionDigits = 2 
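The CostCalculator hunk that follows prices tokens per 1K with a two-decimal display floor: any amount under $0.01 is rendered as "$ <0.01", otherwise it is formatted with toFixed(fractionDigits). A minimal standalone sketch of that arithmetic, assuming illustrative per-1K rates (inputCostPer1K, outputCostPer1K and the helper names below are not part of the patch):

```ts
// Sketch only: per-1K-token cost with the same two-decimal display floor used in CostCalculator.
const formatCost = (cost: number, fractionDigits = 2): string =>
    cost < 0.01 ? '$ <0.01' : '$ ' + cost.toFixed(fractionDigits)

// inputCostPer1K / outputCostPer1K are assumed example rates, not values taken from the patch.
const exampleCost = (promptTokens: number, completionTokens: number, inputCostPer1K: number, outputCostPer1K: number) => {
    const promptCost = inputCostPer1K * (promptTokens / 1000)
    const completionCost = outputCostPer1K * (completionTokens / 1000)
    return {
        promptCost: formatCost(promptCost),
        completionCost: formatCost(completionCost),
        totalCost: formatCost(promptCost + completionCost)
    }
}

// exampleCost(1200, 300, 0.5, 1.5) -> { promptCost: '$ 0.60', completionCost: '$ 0.45', totalCost: '$ 1.05' }
```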
+// This function calculates the cost of the tokens from a metrics array +export const calculateCost = (metricsArray: ICommonObject[]) => { + for (let i = 0; i < metricsArray.length; i++) { + const metric = metricsArray[i] + const model = metric.model + if (!model) { + continue + } + const completionTokens = metric.completionTokens + const promptTokens = metric.promptTokens + const totalTokens = metric.totalTokens + + let promptTokensCost: string = '0' + let completionTokensCost: string = '0' + let totalTokensCost = '0' + if (metric.cost_values) { + const costValues = metric.cost_values + if (costValues.total_price > 0) { + let cost = costValues.total_cost * (totalTokens / 1000) + if (cost < 0.01) { + totalTokensCost = '$ <0.01' + } else { + totalTokensCost = '$ ' + cost.toFixed(fractionDigits) + } + } else { + let totalCost = 0 + if (promptTokens) { + const cost = costValues.input_cost * (promptTokens / 1000) + totalCost += cost + if (cost < 0.01) { + promptTokensCost = '$ <0.01' + } else { + promptTokensCost = '$ ' + cost.toFixed(fractionDigits) + } + } + if (completionTokens) { + const cost = costValues.output_cost * (completionTokens / 1000) + totalCost += cost + if (cost < 0.01) { + completionTokensCost = '$ <0.01' + } else { + completionTokensCost = '$ ' + cost.toFixed(fractionDigits) + } + } + if (totalCost < 0.01) { + totalTokensCost = '$ <0.01' + } else { + totalTokensCost = '$ ' + totalCost.toFixed(fractionDigits) + } + } + } + metric['totalCost'] = totalTokensCost + metric['promptCost'] = promptTokensCost + metric['completionCost'] = completionTokensCost + } +} diff --git a/packages/server/src/services/evaluations/EvaluatorRunner.ts b/packages/server/src/services/evaluations/EvaluatorRunner.ts new file mode 100644 index 00000000000..4b2d7d81da1 --- /dev/null +++ b/packages/server/src/services/evaluations/EvaluatorRunner.ts @@ -0,0 +1,214 @@ +import evaluatorsService from '../evaluator' +import { ICommonObject } from 'flowise-components' + +interface EvaluatorReturnType { + name: string + type?: string + operator?: string + measure?: string + value?: string + result: 'Pass' | 'Fail' | 'Error' +} + +export const runAdditionalEvaluators = async ( + metricsArray: ICommonObject[], + actualOutputArray: string[], + errorArray: string[], + selectedEvaluators: string[] +) => { + const evaluationResults: any[] = [] + const evaluatorDict: any = {} + + for (let j = 0; j < actualOutputArray.length; j++) { + const subArray: EvaluatorReturnType[] = [] + const actualOutput = actualOutputArray[j].toLowerCase().trim() + + for (let i = 0; i < selectedEvaluators.length; i++) { + const evaluatorId = selectedEvaluators[i] + let evaluator = evaluatorDict[evaluatorId] + if (!evaluator) { + evaluator = await evaluatorsService.getEvaluator(evaluatorId) + evaluatorDict[evaluatorId] = evaluator + } + + // iterate through each actual output and run the evaluator + const returnFields: EvaluatorReturnType = { + ...evaluator + } + if (errorArray[j]) { + // if this output is an error, skip over the evaluators. + subArray.push({ + ...returnFields, + result: 'Error' + }) + continue + } + try { + if (evaluator.type === 'numeric') { + const metric = metricsArray[j] + const metricValue = metric[evaluator.measure] + + subArray.push({ + ...returnFields, + result: evaluateExpression( + evaluator.measure !== 'responseLength' ? metricValue : actualOutput.length, + evaluator.operator, + evaluator.value + ) + ? 
'Pass' + : 'Fail' + }) + } + if (evaluator.type === 'json') { + const operator = evaluator.operator + let passed = false + if (operator === 'IsValidJSON') { + try { + passed = JSON.parse(actualOutput) !== undefined + } catch (error) { + passed = false + } + } else if (operator === 'IsNotValidJSON') { + try { + JSON.parse(actualOutput) + passed = false + } catch (error) { + passed = true + } + } + subArray.push({ + ...returnFields, + result: passed ? 'Pass' : 'Fail' + }) + } + if (evaluator.type === 'text') { + const operator = evaluator.operator + const value = evaluator.value.toLowerCase().trim() as string + let splitValues = [] + let passed = false + switch (operator) { + case 'NotStartsWith': + subArray.push({ + ...returnFields, + result: actualOutput.startsWith(value) ? 'Fail' : 'Pass' + }) + break + case 'StartsWith': + subArray.push({ + ...returnFields, + result: actualOutput.startsWith(value) ? 'Pass' : 'Fail' + }) + break + case 'ContainsAny': + passed = false + splitValues = value.split(',').map((v) => v.trim().toLowerCase()) // Split, trim, and convert to lowercase + for (let i = 0; i < splitValues.length; i++) { + if (actualOutput.includes(splitValues[i])) { + passed = true + break + } + } + subArray.push({ + ...returnFields, + result: passed ? 'Pass' : 'Fail' + }) + break + case 'ContainsAll': + passed = true + splitValues = value.split(',').map((v) => v.trim().toLowerCase()) // Split, trim, and convert to lowercase + for (let i = 0; i < splitValues.length; i++) { + if (!actualOutput.includes(splitValues[i])) { + passed = false + break + } + } + subArray.push({ + ...returnFields, + result: passed ? 'Pass' : 'Fail' + }) + break + case 'DoesNotContainAny': + passed = true + splitValues = value.split(',').map((v) => v.trim().toLowerCase()) // Split, trim, and convert to lowercase + for (let i = 0; i < splitValues.length; i++) { + if (actualOutput.includes(splitValues[i])) { + passed = false + break + } + } + subArray.push({ + ...returnFields, + result: passed ? 'Fail' : 'Pass' + }) + break + case 'DoesNotContainAll': + passed = true + splitValues = value.split(',').map((v) => v.trim().toLowerCase()) // Split, trim, and convert to lowercase + for (let i = 0; i < splitValues.length; i++) { + if (actualOutput.includes(splitValues[i])) { + passed = false + break + } + } + subArray.push({ + ...returnFields, + result: passed ? 
'Pass' : 'Fail' + }) + break + } + } + } catch (error) { + subArray.push({ + name: evaluator?.name || 'Missing Evaluator', + result: 'Error' + }) + } + } + evaluationResults.push(subArray) + } + // iterate through the array of evaluation results and count the number of passes and fails using the result key + let passCount = 0 + let failCount = 0 + let errorCount = 0 + for (let i = 0; i < evaluationResults.length; i++) { + const subArray = evaluationResults[i] + for (let j = 0; j < subArray.length; j++) { + if (subArray[j].result === 'Pass') { + passCount++ + } else if (subArray[j].result === 'Fail') { + failCount++ + } else if (subArray[j].result === 'Error') { + errorCount++ + } + delete subArray[j].createdDate + delete subArray[j].updatedDate + } + } + return { + results: evaluationResults, + evaluatorMetrics: { + passCount, + failCount, + errorCount + } + } +} + +const evaluateExpression = (actual: number, operator: string, expected: string) => { + switch (operator) { + case 'equals': + return actual === parseInt(expected) + case 'notEquals': + return actual !== parseInt(expected) + case 'greaterThan': + return actual > parseInt(expected) + case 'lessThan': + return actual < parseInt(expected) + case 'greaterThanOrEquals': + return actual >= parseInt(expected) + case 'lessThanOrEquals': + return actual <= parseInt(expected) + default: + return false + } +} diff --git a/packages/server/src/services/evaluations/LLMEvaluationRunner.ts b/packages/server/src/services/evaluations/LLMEvaluationRunner.ts new file mode 100644 index 00000000000..351fdad6092 --- /dev/null +++ b/packages/server/src/services/evaluations/LLMEvaluationRunner.ts @@ -0,0 +1,71 @@ +import { convertSchemaToZod, ICommonObject } from 'flowise-components' +import { z } from 'zod' +import { RunnableSequence } from '@langchain/core/runnables' +import { PromptTemplate } from '@langchain/core/prompts' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { databaseEntities } from '../../utils' + +export class LLMEvaluationRunner { + private llm: any + + async runLLMEvaluators(data: ICommonObject, actualOutputArray: string[], errorArray: string[], llmEvaluatorMap: any[]) { + const evaluationResults: any[] = [] + if (this.llm === undefined) { + this.llm = await this.createLLM(data) + } + + for (let j = 0; j < actualOutputArray.length; j++) { + const actualOutput = actualOutputArray[j] + for (let i = 0; i < llmEvaluatorMap.length; i++) { + if (errorArray[j] !== '') { + evaluationResults.push({ + error: 'Not Graded!' 
+ }) + continue + } + try { + const llmEvaluator = llmEvaluatorMap[i] + let evaluator = llmEvaluator.evaluator + const schema = z.object(convertSchemaToZod(JSON.stringify(evaluator.outputSchema))) + const modelWithStructuredOutput = this.llm.withStructuredOutput(schema) + const llmExecutor = RunnableSequence.from([ + PromptTemplate.fromTemplate(evaluator.prompt as string), + modelWithStructuredOutput + ]) + const response = await llmExecutor.invoke({ + question: data.input, + actualOutput: actualOutput, + expectedOutput: data.expectedOutput + }) + evaluationResults.push(response) + } catch (error) { + evaluationResults.push({ + error: 'error' + }) + } + } + } + return evaluationResults + } + + async createLLM(data: ICommonObject): Promise { + try { + const appServer = getRunningExpressApp() + const nodeInstanceFilePath = appServer.nodesPool.componentNodes[data.llmConfig.llm].filePath as string + const nodeModule = await import(nodeInstanceFilePath) + const newNodeInstance = new nodeModule.nodeClass() + let nodeData = { + inputs: { modelName: data.llmConfig.model }, + credential: data.llmConfig.credentialId, + id: 'llm_0' + } + const options: ICommonObject = { + appDataSource: appServer.AppDataSource, + databaseEntities: databaseEntities + } + return await newNodeInstance.init(nodeData, undefined, options) + } catch (error) { + throw new Error('Error creating LLM') + } + } +} diff --git a/packages/server/src/services/evaluations/index.ts b/packages/server/src/services/evaluations/index.ts new file mode 100644 index 00000000000..acca639cc05 --- /dev/null +++ b/packages/server/src/services/evaluations/index.ts @@ -0,0 +1,520 @@ +import { StatusCodes } from 'http-status-codes' +import { EvaluationRunner, ICommonObject } from 'flowise-components' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { getErrorMessage } from '../../errors/utils' +import { Dataset } from '../../database/entities/Dataset' +import { DatasetRow } from '../../database/entities/DatasetRow' +import { Evaluation } from '../../database/entities/Evaluation' +import { EvaluationStatus, IEvaluationResult } from '../../Interface' +import { EvaluationRun } from '../../database/entities/EvaluationRun' +import { Credential } from '../../database/entities/Credential' +import { ApiKey } from '../../database/entities/ApiKey' +import { ChatFlow } from '../../database/entities/ChatFlow' +import { getAppVersion } from '../../utils' +import { In } from 'typeorm' +import { getWorkspaceSearchOptions } from '../../enterprise/utils/ControllerServiceUtils' +import { v4 as uuidv4 } from 'uuid' +import { calculateCost } from './CostCalculator' +import { runAdditionalEvaluators } from './EvaluatorRunner' +import evaluatorsService from '../evaluator' +import { LLMEvaluationRunner } from './LLMEvaluationRunner' + +const runAgain = async (id: string, baseURL: string, orgId: string) => { + try { + const appServer = getRunningExpressApp() + const evaluation = await appServer.AppDataSource.getRepository(Evaluation).findOneBy({ + id: id + }) + if (!evaluation) throw new Error(`Evaluation ${id} not found`) + const additionalConfig: any = JSON.parse(evaluation.additionalConfig) + const data: ICommonObject = { + chatflowId: evaluation.chatflowId, + chatflowName: evaluation.chatflowName, + datasetName: evaluation.datasetName, + datasetId: evaluation.datasetId, + evaluationType: evaluation.evaluationType, + selectedSimpleEvaluators: 
JSON.stringify(additionalConfig.simpleEvaluators), + datasetAsOneConversation: additionalConfig.datasetAsOneConversation + } + data.name = evaluation.name + data.workspaceId = evaluation.workspaceId + if (evaluation.evaluationType === 'llm') { + data.selectedLLMEvaluators = JSON.stringify(additionalConfig.lLMEvaluators) + data.credentialId = additionalConfig.credentialId + // this is to preserve backward compatibility for evaluations created before the llm/model options were added + if (!additionalConfig.credentialId && additionalConfig.llmConfig) { + data.model = additionalConfig.llmConfig.model + data.llm = additionalConfig.llmConfig.llm + data.credentialId = additionalConfig.llmConfig.credentialId + } else { + data.model = 'gpt-3.5-turbo' + data.llm = 'OpenAI' + } + } + data.version = true + return await createEvaluation(data, baseURL, orgId) + } catch (error) { + throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error: EvalsService.runAgain - ${getErrorMessage(error)}`) + } +} + +const createEvaluation = async (body: ICommonObject, baseURL: string, orgId: string) => { + try { + const appServer = getRunningExpressApp() + const newEval = new Evaluation() + Object.assign(newEval, body) + newEval.status = EvaluationStatus.PENDING + + const row = appServer.AppDataSource.getRepository(Evaluation).create(newEval) + row.average_metrics = JSON.stringify({}) + + const additionalConfig: any = { + datasetAsOneConversation: body.datasetAsOneConversation, + simpleEvaluators: body.selectedSimpleEvaluators.length > 0 ? JSON.parse(body.selectedSimpleEvaluators) : [] + } + + if (body.evaluationType === 'llm') { + additionalConfig.lLMEvaluators = body.selectedLLMEvaluators.length > 0 ? JSON.parse(body.selectedLLMEvaluators) : [] + additionalConfig.llmConfig = { + credentialId: body.credentialId, + llm: body.llm, + model: body.model + } + } + row.additionalConfig = JSON.stringify(additionalConfig) + const newEvaluation = await appServer.AppDataSource.getRepository(Evaluation).save(row) + + await appServer.telemetry.sendTelemetry( + 'evaluation_created', + { + version: await getAppVersion() + }, + orgId + ) + + const dataset = await appServer.AppDataSource.getRepository(Dataset).findOneBy({ + id: body.datasetId + }) + if (!dataset) throw new Error(`Dataset ${body.datasetId} not found`) + + const items = await appServer.AppDataSource.getRepository(DatasetRow).find({ + where: { datasetId: dataset.id }, + order: { sequenceNo: 'ASC' } + }) + ;(dataset as any).rows = items + + const data: ICommonObject = { + chatflowId: body.chatflowId, + dataset: dataset, + evaluationType: body.evaluationType, + evaluationId: newEvaluation.id, + credentialId: body.credentialId + } + if (body.datasetAsOneConversation) { + data.sessionId = uuidv4() + } + + // When chatflow has an APIKey + const apiKeys: { chatflowId: string; apiKey: string }[] = [] + const chatflowIds = JSON.parse(body.chatflowId) + for (let i = 0; i < chatflowIds.length; i++) { + const chatflowId = chatflowIds[i] + const cFlow = await appServer.AppDataSource.getRepository(ChatFlow).findOneBy({ + id: chatflowId + }) + if (cFlow && cFlow.apikeyid) { + const apikeyObj = await appServer.AppDataSource.getRepository(ApiKey).findOneBy({ + id: cFlow.apikeyid + }) + if (apikeyObj) { + apiKeys.push({ + chatflowId: chatflowId, + apiKey: apikeyObj.apiKey + }) + } + } + } + if (apiKeys.length > 0) { + data.apiKeys = apiKeys + } + + // save the evaluation with status as pending + const evalRunner = new EvaluationRunner(baseURL) + if (body.evaluationType === 
'llm') { + const credential = await appServer.AppDataSource.getRepository(Credential).findOneBy({ + id: body.credentialId + }) + + if (!credential) throw new Error(`Credential ${body.credentialId} not found`) + } + + let evalMetrics = { passCount: 0, failCount: 0, errorCount: 0 } + evalRunner + .runEvaluations(data) + .then(async (result: any) => { + let totalTime = 0 + // let us assume that the eval is successful + let allRowsSuccessful = true + try { + const llmEvaluationRunner = new LLMEvaluationRunner() + for (const resultRow of result.rows) { + const metricsArray: ICommonObject[] = [] + const actualOutputArray: string[] = [] + const errorArray: string[] = [] + for (const evaluationRow of resultRow.evaluations) { + if (evaluationRow.status === 'error') { + // if a row failed, mark the entire run as failed (error) + allRowsSuccessful = false + } + actualOutputArray.push(evaluationRow.actualOutput) + totalTime += parseFloat(evaluationRow.latency) + let metricsObjFromRun: ICommonObject = {} + + const metrics = evaluationRow.metrics + if (metrics) { + metrics.map((metric: any) => { + if (metric) { + const json = typeof metric === 'object' ? metric : JSON.parse(metric) + Object.getOwnPropertyNames(json).map((key) => { + metricsObjFromRun[key] = json[key] + }) + } + }) + metricsArray.push(metricsObjFromRun) + } + errorArray.push(evaluationRow.error) + } + + const newRun = new EvaluationRun() + newRun.evaluationId = newEvaluation.id + newRun.runDate = new Date() + newRun.input = resultRow.input + newRun.expectedOutput = resultRow.expectedOutput + newRun.actualOutput = JSON.stringify(actualOutputArray) + newRun.errors = JSON.stringify(errorArray) + calculateCost(metricsArray) + newRun.metrics = JSON.stringify(metricsArray) + + const { results, evaluatorMetrics } = await runAdditionalEvaluators( + metricsArray, + actualOutputArray, + errorArray, + body.selectedSimpleEvaluators.length > 0 ? JSON.parse(body.selectedSimpleEvaluators) : [] + ) + + newRun.evaluators = JSON.stringify(results) + evalMetrics.passCount += evaluatorMetrics.passCount + evalMetrics.failCount += evaluatorMetrics.failCount + evalMetrics.errorCount += evaluatorMetrics.errorCount + + if (body.evaluationType === 'llm') { + resultRow.llmConfig = additionalConfig.llmConfig + resultRow.LLMEvaluators = body.selectedLLMEvaluators.length > 0 ? 
JSON.parse(body.selectedLLMEvaluators) : [] + const llmEvaluatorMap: any = [] + for (let i = 0; i < resultRow.LLMEvaluators.length; i++) { + const evaluatorId = resultRow.LLMEvaluators[i] + const evaluator = await evaluatorsService.getEvaluator(evaluatorId) + llmEvaluatorMap.push({ + evaluatorId: evaluatorId, + evaluator: evaluator + }) + } + // iterate over the actualOutputArray and add the actualOutput to the evaluationLineItem object + const resultArray = await llmEvaluationRunner.runLLMEvaluators( + resultRow, + actualOutputArray, + errorArray, + llmEvaluatorMap + ) + newRun.llmEvaluators = JSON.stringify(resultArray) + const row = appServer.AppDataSource.getRepository(EvaluationRun).create(newRun) + await appServer.AppDataSource.getRepository(EvaluationRun).save(row) + } else { + const row = appServer.AppDataSource.getRepository(EvaluationRun).create(newRun) + await appServer.AppDataSource.getRepository(EvaluationRun).save(row) + } + } + //update the evaluation with status as completed + let passPercent = -1 + if (evalMetrics.passCount + evalMetrics.failCount + evalMetrics.errorCount > 0) { + passPercent = + (evalMetrics.passCount / (evalMetrics.passCount + evalMetrics.failCount + evalMetrics.errorCount)) * 100 + } + appServer.AppDataSource.getRepository(Evaluation) + .findOneBy({ id: newEvaluation.id }) + .then((evaluation: any) => { + evaluation.status = allRowsSuccessful ? EvaluationStatus.COMPLETED : EvaluationStatus.ERROR + evaluation.average_metrics = JSON.stringify({ + averageLatency: (totalTime / result.rows.length).toFixed(3), + totalRuns: result.rows.length, + ...evalMetrics, + passPcnt: passPercent.toFixed(2) + }) + appServer.AppDataSource.getRepository(Evaluation).save(evaluation) + }) + } catch (error) { + //update the evaluation with status as error + appServer.AppDataSource.getRepository(Evaluation) + .findOneBy({ id: newEvaluation.id }) + .then((evaluation: any) => { + evaluation.status = EvaluationStatus.ERROR + appServer.AppDataSource.getRepository(Evaluation).save(evaluation) + }) + } + }) + .catch((error) => { + // Handle errors from runEvaluations + console.error('Error running evaluations:', getErrorMessage(error)) + appServer.AppDataSource.getRepository(Evaluation) + .findOneBy({ id: newEvaluation.id }) + .then((evaluation: any) => { + evaluation.status = EvaluationStatus.ERROR + evaluation.average_metrics = JSON.stringify({ + error: getErrorMessage(error) + }) + appServer.AppDataSource.getRepository(Evaluation).save(evaluation) + }) + .catch((dbError) => { + console.error('Error updating evaluation status:', getErrorMessage(dbError)) + }) + }) + + return getAllEvaluations(body.workspaceId) + } catch (error) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: EvalsService.createEvaluation - ${getErrorMessage(error)}` + ) + } +} + +const getAllEvaluations = async (workspaceId?: string) => { + try { + const appServer = getRunningExpressApp() + const findAndOrderBy: any = { + where: getWorkspaceSearchOptions(workspaceId), + order: { + runDate: 'DESC' + } + } + const evaluations = await appServer.AppDataSource.getRepository(Evaluation).find(findAndOrderBy) + + const returnResults: IEvaluationResult[] = [] + // mark the first evaluation with a unique name as the latestEval and then reset the version number + for (let i = 0; i < evaluations.length; i++) { + const evaluation = evaluations[i] as IEvaluationResult + returnResults.push(evaluation) + // find the first index with this name in the evaluations array + // as it is sorted desc, 
make the first evaluation with this name as the latestEval + const currentIndex = evaluations.indexOf(evaluation) + if (evaluations.findIndex((e) => e.name === evaluation.name) === currentIndex) { + returnResults[i].latestEval = true + } + } + + for (let i = 0; i < returnResults.length; i++) { + const evaluation = returnResults[i] + if (evaluation.latestEval) { + const versions = returnResults.filter((e) => e.name === evaluation.name) + let descVersion = versions.length + for (let j = 0; j < versions.length; j++) { + versions[j].version = descVersion-- + } + } + } + + return returnResults + } catch (error) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: EvalsService.getAllEvaluations - ${getErrorMessage(error)}` + ) + } +} + +// Delete evaluation and all rows via id +const deleteEvaluation = async (id: string, activeWorkspaceId?: string) => { + try { + const appServer = getRunningExpressApp() + await appServer.AppDataSource.getRepository(Evaluation).delete({ id: id }) + await appServer.AppDataSource.getRepository(EvaluationRun).delete({ evaluationId: id }) + const results = await appServer.AppDataSource.getRepository(Evaluation).findBy(getWorkspaceSearchOptions(activeWorkspaceId)) + return results + } catch (error) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: EvalsService.deleteEvaluation - ${getErrorMessage(error)}` + ) + } +} + +// check for outdated evaluations +const isOutdated = async (id: string) => { + try { + const appServer = getRunningExpressApp() + const evaluation = await appServer.AppDataSource.getRepository(Evaluation).findOneBy({ + id: id + }) + if (!evaluation) throw new Error(`Evaluation ${id} not found`) + const evaluationRunDate = evaluation.runDate.getTime() + let isOutdated = false + const returnObj: ICommonObject = { + isOutdated: false, + chatflows: [], + dataset: '', + errors: [] + } + + // check if the evaluation is outdated by extracting the runTime and then check with the dataset last updated time as well + // as the chatflows last updated time. 
If the evaluation is outdated, then return true else return false + const dataset = await appServer.AppDataSource.getRepository(Dataset).findOneBy({ + id: evaluation.datasetId + }) + if (dataset) { + const datasetLastUpdated = dataset.updatedDate.getTime() + if (datasetLastUpdated > evaluationRunDate) { + isOutdated = true + returnObj.dataset = dataset + } + } else { + returnObj.errors.push(`Dataset ${evaluation.datasetName} not found`) + isOutdated = true + } + const chatflows = JSON.parse(evaluation.chatflowId) + const chatflowNames = JSON.parse(evaluation.chatflowName) + + for (let i = 0; i < chatflows.length; i++) { + const chatflow = await appServer.AppDataSource.getRepository(ChatFlow).findOneBy({ + id: chatflows[i] + }) + if (!chatflow) { + returnObj.errors.push(`Chatflow ${chatflowNames[i]} not found`) + isOutdated = true + } else { + const chatflowLastUpdated = chatflow.updatedDate.getTime() + if (chatflowLastUpdated > evaluationRunDate) { + isOutdated = true + returnObj.chatflows.push({ + chatflowName: chatflowNames[i], + chatflowId: chatflows[i], + isOutdated: true + }) + } + } + } + returnObj.isOutdated = isOutdated + return returnObj + } catch (error) { + throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error: EvalsService.isOutdated - ${getErrorMessage(error)}`) + } +} + +const getEvaluation = async (id: string) => { + try { + const appServer = getRunningExpressApp() + const evaluation = await appServer.AppDataSource.getRepository(Evaluation).findOneBy({ + id: id + }) + if (!evaluation) throw new Error(`Evaluation ${id} not found`) + const versionCount = await appServer.AppDataSource.getRepository(Evaluation).countBy({ + name: evaluation.name + }) + const items = await appServer.AppDataSource.getRepository(EvaluationRun).find({ + where: { evaluationId: id } + }) + const versions = (await getVersions(id)).versions + const versionNo = versions.findIndex((version: any) => version.id === id) + 1 + return { + ...evaluation, + versionCount: versionCount, + versionNo: versionNo, + rows: items + } + } catch (error) { + throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error: EvalsService.getEvaluation - ${getErrorMessage(error)}`) + } +} + +const getVersions = async (id: string) => { + try { + const appServer = getRunningExpressApp() + const evaluation = await appServer.AppDataSource.getRepository(Evaluation).findOneBy({ + id: id + }) + if (!evaluation) throw new Error(`Evaluation ${id} not found`) + const versions = await appServer.AppDataSource.getRepository(Evaluation).find({ + where: { + name: evaluation.name + }, + order: { + runDate: 'ASC' + } + }) + const returnResults: any[] = [] + versions.map((version, index) => { + returnResults.push({ + id: version.id, + runDate: version.runDate, + version: index + 1 + }) + }) + return { + versions: returnResults + } + } catch (error) { + throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error: EvalsService.getEvaluation - ${getErrorMessage(error)}`) + } +} + +const patchDeleteEvaluations = async (ids: string[] = [], isDeleteAllVersion?: boolean, activeWorkspaceId?: string) => { + try { + const appServer = getRunningExpressApp() + const evalsToBeDeleted = await appServer.AppDataSource.getRepository(Evaluation).find({ + where: { + id: In(ids) + } + }) + await appServer.AppDataSource.getRepository(Evaluation).delete(ids) + for (const evaluation of evalsToBeDeleted) { + await appServer.AppDataSource.getRepository(EvaluationRun).delete({ evaluationId: evaluation.id }) + } + + if 
(isDeleteAllVersion) { + for (const evaluation of evalsToBeDeleted) { + const otherVersionEvals = await appServer.AppDataSource.getRepository(Evaluation).find({ + where: { + name: evaluation.name + } + }) + if (otherVersionEvals.length > 0) { + await appServer.AppDataSource.getRepository(Evaluation).delete( + [...otherVersionEvals].map((evaluation) => evaluation.id) + ) + for (const otherVersionEval of otherVersionEvals) { + await appServer.AppDataSource.getRepository(EvaluationRun).delete({ evaluationId: otherVersionEval.id }) + } + } + } + } + + const results = await appServer.AppDataSource.getRepository(Evaluation).findBy(getWorkspaceSearchOptions(activeWorkspaceId)) + return results + } catch (error) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: EvalsService.patchDeleteEvaluations - ${getErrorMessage(error)}` + ) + } +} + +export default { + createEvaluation, + getAllEvaluations, + deleteEvaluation, + getEvaluation, + isOutdated, + runAgain, + getVersions, + patchDeleteEvaluations +} diff --git a/packages/server/src/services/evaluator/index.ts b/packages/server/src/services/evaluator/index.ts new file mode 100644 index 00000000000..ce52cf75896 --- /dev/null +++ b/packages/server/src/services/evaluator/index.ts @@ -0,0 +1,97 @@ +import { StatusCodes } from 'http-status-codes' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { getErrorMessage } from '../../errors/utils' +import { Evaluator } from '../../database/entities/Evaluator' +import { EvaluatorDTO } from '../../Interface.Evaluation' +import { getWorkspaceSearchOptions } from '../../enterprise/utils/ControllerServiceUtils' + +const getAllEvaluators = async (workspaceId?: string) => { + try { + const appServer = getRunningExpressApp() + const results: Evaluator[] = await appServer.AppDataSource.getRepository(Evaluator).findBy(getWorkspaceSearchOptions(workspaceId)) + return EvaluatorDTO.fromEntities(results) + } catch (error) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: evaluatorService.getAllEvaluators - ${getErrorMessage(error)}` + ) + } +} + +const getEvaluator = async (id: string) => { + try { + const appServer = getRunningExpressApp() + const evaluator = await appServer.AppDataSource.getRepository(Evaluator).findOneBy({ + id: id + }) + if (!evaluator) throw new Error(`Evaluator ${id} not found`) + return EvaluatorDTO.fromEntity(evaluator) + } catch (error) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: evaluatorService.getEvaluator - ${getErrorMessage(error)}` + ) + } +} + +// Create new Evaluator +const createEvaluator = async (body: any) => { + try { + const appServer = getRunningExpressApp() + const newDs = EvaluatorDTO.toEntity(body) + + const evaluator = appServer.AppDataSource.getRepository(Evaluator).create(newDs) + const result = await appServer.AppDataSource.getRepository(Evaluator).save(evaluator) + return EvaluatorDTO.fromEntity(result) + } catch (error) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: evaluatorService.createEvaluator - ${getErrorMessage(error)}` + ) + } +} + +// Update Evaluator +const updateEvaluator = async (id: string, body: any) => { + try { + const appServer = getRunningExpressApp() + const evaluator = await appServer.AppDataSource.getRepository(Evaluator).findOneBy({ + id: id + }) + + if (!evaluator) throw new Error(`Evaluator ${id} not found`) + 
+ const updateEvaluator = EvaluatorDTO.toEntity(body) + updateEvaluator.id = id + appServer.AppDataSource.getRepository(Evaluator).merge(evaluator, updateEvaluator) + const result = await appServer.AppDataSource.getRepository(Evaluator).save(evaluator) + return EvaluatorDTO.fromEntity(result) + } catch (error) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: evaluatorService.updateEvaluator - ${getErrorMessage(error)}` + ) + } +} + +// Delete Evaluator via id +const deleteEvaluator = async (id: string) => { + try { + const appServer = getRunningExpressApp() + return await appServer.AppDataSource.getRepository(Evaluator).delete({ id: id }) + } catch (error) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: evaluatorService.deleteEvaluator - ${getErrorMessage(error)}` + ) + } +} + +export default { + getAllEvaluators, + getEvaluator, + createEvaluator, + updateEvaluator, + deleteEvaluator +} diff --git a/packages/server/src/services/executions/index.ts b/packages/server/src/services/executions/index.ts index 899d6a092c9..b8cc136987e 100644 --- a/packages/server/src/services/executions/index.ts +++ b/packages/server/src/services/executions/index.ts @@ -1,14 +1,14 @@ import { StatusCodes } from 'http-status-codes' +import { In } from 'typeorm' +import { ChatMessage } from '../../database/entities/ChatMessage' +import { Execution } from '../../database/entities/Execution' import { InternalFlowiseError } from '../../errors/internalFlowiseError' import { getErrorMessage } from '../../errors/utils' -import { getRunningExpressApp } from '../../utils/getRunningExpressApp' -import { Execution } from '../../database/entities/Execution' import { ExecutionState, IAgentflowExecutedData } from '../../Interface' -import { In } from 'typeorm' -import { ChatMessage } from '../../database/entities/ChatMessage' import { _removeCredentialId } from '../../utils/buildAgentflow' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' -interface ExecutionFilters { +export interface ExecutionFilters { id?: string agentflowId?: string sessionId?: string @@ -17,13 +17,19 @@ interface ExecutionFilters { endDate?: Date page?: number limit?: number + workspaceId?: string } -const getExecutionById = async (executionId: string): Promise => { +const getExecutionById = async (executionId: string, workspaceId?: string): Promise => { try { const appServer = getRunningExpressApp() const executionRepository = appServer.AppDataSource.getRepository(Execution) - const res = await executionRepository.findOne({ where: { id: executionId } }) + + const query: any = { id: executionId } + // Add workspace filtering if provided + if (workspaceId) query.workspaceId = workspaceId + + const res = await executionRepository.findOne({ where: query }) if (!res) { throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Execution ${executionId} not found`) } @@ -59,7 +65,7 @@ const getPublicExecutionById = async (executionId: string): Promise => { try { const appServer = getRunningExpressApp() - const { id, agentflowId, sessionId, state, startDate, endDate, page = 1, limit = 10 } = filters + const { id, agentflowId, sessionId, state, startDate, endDate, page = 1, limit = 10, workspaceId } = filters // Handle UUID fields properly using raw parameters to avoid type conversion issues // This uses the query builder instead of direct objects for compatibility with UUID fields @@ -74,6 +80,7 @@ const getAllExecutions = async (filters: ExecutionFilters = {}): Promise<{ 
data: if (agentflowId) queryBuilder.andWhere('execution.agentflowId = :agentflowId', { agentflowId }) if (sessionId) queryBuilder.andWhere('execution.sessionId = :sessionId', { sessionId }) if (state) queryBuilder.andWhere('execution.state = :state', { state }) + if (workspaceId) queryBuilder.andWhere('execution.workspaceId = :workspaceId', { workspaceId }) // Date range conditions if (startDate && endDate) { @@ -95,12 +102,15 @@ const getAllExecutions = async (filters: ExecutionFilters = {}): Promise<{ data: } } -const updateExecution = async (executionId: string, data: Partial): Promise => { +const updateExecution = async (executionId: string, data: Partial, workspaceId?: string): Promise => { try { const appServer = getRunningExpressApp() - const execution = await appServer.AppDataSource.getRepository(Execution).findOneBy({ - id: executionId - }) + + const query: any = { id: executionId } + // Add workspace filtering if provided + if (workspaceId) query.workspaceId = workspaceId + + const execution = await appServer.AppDataSource.getRepository(Execution).findOneBy(query) if (!execution) { throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Execution ${executionId} not found`) } @@ -120,17 +130,20 @@ const updateExecution = async (executionId: string, data: Partial): P /** * Delete multiple executions by their IDs * @param executionIds Array of execution IDs to delete + * @param workspaceId Optional workspace ID to filter executions * @returns Object with success status and count of deleted executions */ -const deleteExecutions = async (executionIds: string[]): Promise<{ success: boolean; deletedCount: number }> => { +const deleteExecutions = async (executionIds: string[], workspaceId?: string): Promise<{ success: boolean; deletedCount: number }> => { try { const appServer = getRunningExpressApp() const executionRepository = appServer.AppDataSource.getRepository(Execution) - // Delete executions where id is in the provided array - const result = await executionRepository.delete({ - id: In(executionIds) - }) + // Create the where condition with workspace filtering if provided + const whereCondition: any = { id: In(executionIds) } + if (workspaceId) whereCondition.workspaceId = workspaceId + + // Delete executions where id is in the provided array and belongs to the workspace + const result = await executionRepository.delete(whereCondition) // Update chat message executionId column to NULL await appServer.AppDataSource.getRepository(ChatMessage).update({ executionId: In(executionIds) }, { executionId: null as any }) diff --git a/packages/server/src/services/export-import/index.ts b/packages/server/src/services/export-import/index.ts index 775596779a2..87ddc6ade2c 100644 --- a/packages/server/src/services/export-import/index.ts +++ b/packages/server/src/services/export-import/index.ts @@ -13,12 +13,15 @@ import { Tool } from '../../database/entities/Tool' import { Variable } from '../../database/entities/Variable' import { InternalFlowiseError } from '../../errors/internalFlowiseError' import { getErrorMessage } from '../../errors/utils' +import assistantsService from '../../services/assistants' +import chatflowsService from '../../services/chatflows' import { getRunningExpressApp } from '../../utils/getRunningExpressApp' +import { checkUsageLimit } from '../../utils/quotaUsage' import assistantService from '../assistants' import chatMessagesService from '../chat-messages' import chatflowService from '../chatflows' import documenStoreService from '../documentstore' -import 
executionService from '../executions' +import executionService, { ExecutionFilters } from '../executions' import marketplacesService from '../marketplaces' import toolsService from '../tools' import variableService from '../variables' @@ -85,39 +88,58 @@ const convertExportInput = (body: any): ExportInput => { } const FileDefaultName = 'ExportData.json' -const exportData = async (exportInput: ExportInput): Promise<{ FileDefaultName: string } & ExportData> => { +const exportData = async (exportInput: ExportInput, activeWorkspaceId?: string): Promise<{ FileDefaultName: string } & ExportData> => { try { - let AgentFlow: ChatFlow[] = exportInput.agentflow === true ? await chatflowService.getAllChatflows('MULTIAGENT') : [] - let AgentFlowV2: ChatFlow[] = exportInput.agentflowv2 === true ? await chatflowService.getAllChatflows('AGENTFLOW') : [] + let AgentFlow: ChatFlow[] = + exportInput.agentflow === true ? await chatflowService.getAllChatflows('MULTIAGENT', activeWorkspaceId) : [] - let AssistantCustom: Assistant[] = exportInput.assistantCustom === true ? await assistantService.getAllAssistants('CUSTOM') : [] - let AssistantFlow: ChatFlow[] = exportInput.assistantCustom === true ? await chatflowService.getAllChatflows('ASSISTANT') : [] + let AgentFlowV2: ChatFlow[] = + exportInput.agentflowv2 === true ? await chatflowService.getAllChatflows('AGENTFLOW', activeWorkspaceId) : [] - let AssistantOpenAI: Assistant[] = exportInput.assistantOpenAI === true ? await assistantService.getAllAssistants('OPENAI') : [] + let AssistantCustom: Assistant[] = + exportInput.assistantCustom === true ? await assistantService.getAllAssistants('CUSTOM', activeWorkspaceId) : [] + let AssistantFlow: ChatFlow[] = + exportInput.assistantCustom === true ? await chatflowService.getAllChatflows('ASSISTANT', activeWorkspaceId) : [] - let AssistantAzure: Assistant[] = exportInput.assistantAzure === true ? await assistantService.getAllAssistants('AZURE') : [] + let AssistantOpenAI: Assistant[] = + exportInput.assistantOpenAI === true ? await assistantService.getAllAssistants('OPENAI', activeWorkspaceId) : [] - let ChatFlow: ChatFlow[] = exportInput.chatflow === true ? await chatflowService.getAllChatflows('CHATFLOW') : [] + let AssistantAzure: Assistant[] = + exportInput.assistantAzure === true ? await assistantService.getAllAssistants('AZURE', activeWorkspaceId) : [] - let ChatMessage: ChatMessage[] = exportInput.chat_message === true ? await chatMessagesService.getAllMessages() : [] + let ChatFlow: ChatFlow[] = exportInput.chatflow === true ? await chatflowService.getAllChatflows('CHATFLOW', activeWorkspaceId) : [] + + const allChatflow: ChatFlow[] = + exportInput.chat_message === true || exportInput.chat_feedback === true + ? await chatflowService.getAllChatflows(undefined, activeWorkspaceId) + : [] + const chatflowIds = allChatflow.map((chatflow) => chatflow.id) + + let ChatMessage: ChatMessage[] = + exportInput.chat_message === true ? await chatMessagesService.getMessagesByChatflowIds(chatflowIds) : [] let ChatMessageFeedback: ChatMessageFeedback[] = - exportInput.chat_feedback === true ? await chatMessagesService.getAllMessagesFeedback() : [] + exportInput.chat_feedback === true ? await chatMessagesService.getMessagesFeedbackByChatflowIds(chatflowIds) : [] - let CustomTemplate: CustomTemplate[] = exportInput.custom_template === true ? 
await marketplacesService.getAllCustomTemplates() : [] - CustomTemplate = CustomTemplate.map((customTemplate) => ({ ...customTemplate, usecases: JSON.stringify(customTemplate.usecases) })) + let CustomTemplate: CustomTemplate[] = + exportInput.custom_template === true ? await marketplacesService.getAllCustomTemplates(activeWorkspaceId) : [] - let DocumentStore: DocumentStore[] = exportInput.document_store === true ? await documenStoreService.getAllDocumentStores() : [] + let DocumentStore: DocumentStore[] = + exportInput.document_store === true ? await documenStoreService.getAllDocumentStores(activeWorkspaceId) : [] + const documentStoreIds = DocumentStore.map((documentStore) => documentStore.id) let DocumentStoreFileChunk: DocumentStoreFileChunk[] = - exportInput.document_store === true ? await documenStoreService.getAllDocumentFileChunks() : [] + exportInput.document_store === true + ? await documenStoreService.getAllDocumentFileChunksByDocumentStoreIds(documentStoreIds) + : [] - const { data: totalExecutions } = exportInput.execution === true ? await executionService.getAllExecutions() : { data: [] } + const filters: ExecutionFilters = { workspaceId: activeWorkspaceId } + const { data: totalExecutions } = exportInput.execution === true ? await executionService.getAllExecutions(filters) : { data: [] } let Execution: Execution[] = exportInput.execution === true ? totalExecutions : [] - let Tool: Tool[] = exportInput.tool === true ? await toolsService.getAllTools() : [] + let Tool: Tool[] = exportInput.tool === true ? await toolsService.getAllTools(activeWorkspaceId) : [] - let Variable: Variable[] = exportInput.variable === true ? await variableService.getAllVariables() : [] + let Variable: Variable[] = exportInput.variable === true ? await variableService.getAllVariables(activeWorkspaceId) : [] return { FileDefaultName, @@ -187,7 +209,12 @@ async function replaceDuplicateIdsForAssistant(queryRunner: QueryRunner, origina } } -async function replaceDuplicateIdsForChatMessage(queryRunner: QueryRunner, originalData: ExportData, chatMessages: ChatMessage[]) { +async function replaceDuplicateIdsForChatMessage( + queryRunner: QueryRunner, + originalData: ExportData, + chatMessages: ChatMessage[], + activeWorkspaceId?: string +) { try { const chatmessageChatflowIds = chatMessages.map((chatMessage) => { return { id: chatMessage.chatflowid, qty: 0 } @@ -205,7 +232,10 @@ async function replaceDuplicateIdsForChatMessage(queryRunner: QueryRunner, origi }) const databaseChatflowIds = await ( await queryRunner.manager.find(ChatFlow, { - where: { id: In(chatmessageChatflowIds.map((chatmessageChatflowId) => chatmessageChatflowId.id)) } + where: { + id: In(chatmessageChatflowIds.map((chatmessageChatflowId) => chatmessageChatflowId.id)), + workspaceId: activeWorkspaceId + } }) ).map((chatflow) => chatflow.id) chatmessageChatflowIds.forEach((item) => { @@ -239,7 +269,12 @@ async function replaceDuplicateIdsForChatMessage(queryRunner: QueryRunner, origi } } -async function replaceExecutionIdForChatMessage(queryRunner: QueryRunner, originalData: ExportData, chatMessages: ChatMessage[]) { +async function replaceExecutionIdForChatMessage( + queryRunner: QueryRunner, + originalData: ExportData, + chatMessages: ChatMessage[], + activeWorkspaceId?: string +) { try { // step 1 - get all execution ids from chatMessages const chatMessageExecutionIds = chatMessages @@ -259,7 +294,10 @@ async function replaceExecutionIdForChatMessage(queryRunner: QueryRunner, origin // step 3 - increase qty if execution id is in 
database const databaseExecutionIds = await ( await queryRunner.manager.find(Execution, { - where: { id: In(chatMessageExecutionIds.map((chatMessageExecutionId) => chatMessageExecutionId.id)) } + where: { + id: In(chatMessageExecutionIds.map((chatMessageExecutionId) => chatMessageExecutionId.id)), + workspaceId: activeWorkspaceId + } }) ).map((execution) => execution.id) chatMessageExecutionIds.forEach((item) => { @@ -290,7 +328,8 @@ async function replaceExecutionIdForChatMessage(queryRunner: QueryRunner, origin async function replaceDuplicateIdsForChatMessageFeedback( queryRunner: QueryRunner, originalData: ExportData, - chatMessageFeedbacks: ChatMessageFeedback[] + chatMessageFeedbacks: ChatMessageFeedback[], + activeWorkspaceId?: string ) { try { const feedbackChatflowIds = chatMessageFeedbacks.map((feedback) => { @@ -309,7 +348,7 @@ async function replaceDuplicateIdsForChatMessageFeedback( }) const databaseChatflowIds = await ( await queryRunner.manager.find(ChatFlow, { - where: { id: In(feedbackChatflowIds.map((feedbackChatflowId) => feedbackChatflowId.id)) } + where: { id: In(feedbackChatflowIds.map((feedbackChatflowId) => feedbackChatflowId.id)), workspaceId: activeWorkspaceId } }) ).map((chatflow) => chatflow.id) feedbackChatflowIds.forEach((item) => { @@ -503,7 +542,32 @@ function reduceSpaceForChatflowFlowData(chatflows: ChatFlow[]) { }) } -const importData = async (importData: ExportData) => { +function insertWorkspaceId(importedData: any, activeWorkspaceId?: string) { + if (!activeWorkspaceId) return importedData + importedData.forEach((item: any) => { + item.workspaceId = activeWorkspaceId + }) + return importedData +} + +const importData = async (importData: ExportData, orgId: string, activeWorkspaceId: string, subscriptionId: string) => { + // Initialize missing properties with empty arrays to avoid "undefined" errors + importData.AgentFlow = importData.AgentFlow || [] + importData.AgentFlowV2 = importData.AgentFlowV2 || [] + importData.AssistantCustom = importData.AssistantCustom || [] + importData.AssistantFlow = importData.AssistantFlow || [] + importData.AssistantOpenAI = importData.AssistantOpenAI || [] + importData.AssistantAzure = importData.AssistantAzure || [] + importData.ChatFlow = importData.ChatFlow || [] + importData.ChatMessage = importData.ChatMessage || [] + importData.ChatMessageFeedback = importData.ChatMessageFeedback || [] + importData.CustomTemplate = importData.CustomTemplate || [] + importData.DocumentStore = importData.DocumentStore || [] + importData.DocumentStoreFileChunk = importData.DocumentStoreFileChunk || [] + importData.Execution = importData.Execution || [] + importData.Tool = importData.Tool || [] + importData.Variable = importData.Variable || [] + let queryRunner try { queryRunner = getRunningExpressApp().AppDataSource.createQueryRunner() @@ -512,43 +576,125 @@ const importData = async (importData: ExportData) => { try { if (importData.AgentFlow.length > 0) { importData.AgentFlow = reduceSpaceForChatflowFlowData(importData.AgentFlow) + importData.AgentFlow = insertWorkspaceId(importData.AgentFlow, activeWorkspaceId) + const existingChatflowCount = await chatflowsService.getAllChatflowsCountByOrganization('MULTIAGENT', orgId) + const newChatflowCount = importData.AgentFlow.length + await checkUsageLimit( + 'flows', + subscriptionId, + getRunningExpressApp().usageCacheManager, + existingChatflowCount + newChatflowCount + ) importData = await replaceDuplicateIdsForChatFlow(queryRunner, importData, importData.AgentFlow) } if 
(importData.AgentFlowV2.length > 0) { importData.AgentFlowV2 = reduceSpaceForChatflowFlowData(importData.AgentFlowV2) + importData.AgentFlowV2 = insertWorkspaceId(importData.AgentFlowV2, activeWorkspaceId) + const existingChatflowCount = await chatflowsService.getAllChatflowsCountByOrganization('AGENTFLOW', orgId) + const newChatflowCount = importData.AgentFlowV2.length + await checkUsageLimit( + 'flows', + subscriptionId, + getRunningExpressApp().usageCacheManager, + existingChatflowCount + newChatflowCount + ) importData = await replaceDuplicateIdsForChatFlow(queryRunner, importData, importData.AgentFlowV2) } - if (importData.AssistantCustom.length > 0) + if (importData.AssistantCustom.length > 0) { + importData.AssistantCustom = insertWorkspaceId(importData.AssistantCustom, activeWorkspaceId) + const existingAssistantCount = await assistantsService.getAssistantsCountByOrganization('CUSTOM', orgId) + const newAssistantCount = importData.AssistantCustom.length + await checkUsageLimit( + 'flows', + subscriptionId, + getRunningExpressApp().usageCacheManager, + existingAssistantCount + newAssistantCount + ) importData = await replaceDuplicateIdsForAssistant(queryRunner, importData, importData.AssistantCustom) + } if (importData.AssistantFlow.length > 0) { importData.AssistantFlow = reduceSpaceForChatflowFlowData(importData.AssistantFlow) + importData.AssistantFlow = insertWorkspaceId(importData.AssistantFlow, activeWorkspaceId) + const existingChatflowCount = await chatflowsService.getAllChatflowsCountByOrganization('ASSISTANT', orgId) + const newChatflowCount = importData.AssistantFlow.length + await checkUsageLimit( + 'flows', + subscriptionId, + getRunningExpressApp().usageCacheManager, + existingChatflowCount + newChatflowCount + ) importData = await replaceDuplicateIdsForChatFlow(queryRunner, importData, importData.AssistantFlow) } - if (importData.AssistantOpenAI.length > 0) + if (importData.AssistantOpenAI.length > 0) { + importData.AssistantOpenAI = insertWorkspaceId(importData.AssistantOpenAI, activeWorkspaceId) + const existingAssistantCount = await assistantsService.getAssistantsCountByOrganization('OPENAI', orgId) + const newAssistantCount = importData.AssistantOpenAI.length + await checkUsageLimit( + 'flows', + subscriptionId, + getRunningExpressApp().usageCacheManager, + existingAssistantCount + newAssistantCount + ) importData = await replaceDuplicateIdsForAssistant(queryRunner, importData, importData.AssistantOpenAI) - if (importData.AssistantAzure.length > 0) + } + if (importData.AssistantAzure.length > 0) { + importData.AssistantAzure = insertWorkspaceId(importData.AssistantAzure, activeWorkspaceId) + const existingAssistantCount = await assistantsService.getAssistantsCountByOrganization('AZURE', orgId) + const newAssistantCount = importData.AssistantAzure.length + await checkUsageLimit( + 'flows', + subscriptionId, + getRunningExpressApp().usageCacheManager, + existingAssistantCount + newAssistantCount + ) importData = await replaceDuplicateIdsForAssistant(queryRunner, importData, importData.AssistantAzure) + } if (importData.ChatFlow.length > 0) { importData.ChatFlow = reduceSpaceForChatflowFlowData(importData.ChatFlow) + importData.ChatFlow = insertWorkspaceId(importData.ChatFlow, activeWorkspaceId) + const existingChatflowCount = await chatflowsService.getAllChatflowsCountByOrganization('CHATFLOW', orgId) + const newChatflowCount = importData.ChatFlow.length + await checkUsageLimit( + 'flows', + subscriptionId, + getRunningExpressApp().usageCacheManager, + 
existingChatflowCount + newChatflowCount + ) importData = await replaceDuplicateIdsForChatFlow(queryRunner, importData, importData.ChatFlow) } if (importData.ChatMessage.length > 0) { - importData = await replaceDuplicateIdsForChatMessage(queryRunner, importData, importData.ChatMessage) - importData = await replaceExecutionIdForChatMessage(queryRunner, importData, importData.ChatMessage) + importData = await replaceDuplicateIdsForChatMessage(queryRunner, importData, importData.ChatMessage, activeWorkspaceId) + importData = await replaceExecutionIdForChatMessage(queryRunner, importData, importData.ChatMessage, activeWorkspaceId) } if (importData.ChatMessageFeedback.length > 0) - importData = await replaceDuplicateIdsForChatMessageFeedback(queryRunner, importData, importData.ChatMessageFeedback) - if (importData.CustomTemplate.length > 0) + importData = await replaceDuplicateIdsForChatMessageFeedback( + queryRunner, + importData, + importData.ChatMessageFeedback, + activeWorkspaceId + ) + if (importData.CustomTemplate.length > 0) { + importData.CustomTemplate = insertWorkspaceId(importData.CustomTemplate, activeWorkspaceId) importData = await replaceDuplicateIdsForCustomTemplate(queryRunner, importData, importData.CustomTemplate) - if (importData.DocumentStore.length > 0) + } + if (importData.DocumentStore.length > 0) { + importData.DocumentStore = insertWorkspaceId(importData.DocumentStore, activeWorkspaceId) importData = await replaceDuplicateIdsForDocumentStore(queryRunner, importData, importData.DocumentStore) + } if (importData.DocumentStoreFileChunk.length > 0) importData = await replaceDuplicateIdsForDocumentStoreFileChunk(queryRunner, importData, importData.DocumentStoreFileChunk) - if (importData.Tool.length > 0) importData = await replaceDuplicateIdsForTool(queryRunner, importData, importData.Tool) - if (importData.Execution.length > 0) + if (importData.Tool.length > 0) { + importData.Tool = insertWorkspaceId(importData.Tool, activeWorkspaceId) + importData = await replaceDuplicateIdsForTool(queryRunner, importData, importData.Tool) + } + if (importData.Execution.length > 0) { + importData.Execution = insertWorkspaceId(importData.Execution, activeWorkspaceId) importData = await replaceDuplicateIdsForExecution(queryRunner, importData, importData.Execution) - if (importData.Variable.length > 0) + } + if (importData.Variable.length > 0) { + importData.Variable = insertWorkspaceId(importData.Variable, activeWorkspaceId) importData = await replaceDuplicateIdsForVariable(queryRunner, importData, importData.Variable) + } await queryRunner.startTransaction() @@ -572,10 +718,10 @@ const importData = async (importData: ExportData) => { await queryRunner.commitTransaction() } catch (error) { - if (queryRunner && !queryRunner.isTransactionActive) await queryRunner.rollbackTransaction() + if (queryRunner.isTransactionActive) await queryRunner.rollbackTransaction() throw error } finally { - if (queryRunner && !queryRunner.isReleased) await queryRunner.release() + if (!queryRunner.isReleased) await queryRunner.release() } } catch (error) { throw new InternalFlowiseError( diff --git a/packages/server/src/services/log/index.ts b/packages/server/src/services/log/index.ts new file mode 100644 index 00000000000..1f4227f61a1 --- /dev/null +++ b/packages/server/src/services/log/index.ts @@ -0,0 +1,94 @@ +import path from 'path' +import * as fs from 'fs' +import { StatusCodes } from 'http-status-codes' +import { InternalFlowiseError } from '../../errors/internalFlowiseError' +import { getErrorMessage } 
from '../../errors/utils' +import readline from 'readline' + +const readFile = (filePath: string) => { + return new Promise(function (resolve, reject) { + const lines: string[] = [] + const rl = readline.createInterface({ + input: fs.createReadStream(filePath) + }) + + rl.on('line', (line) => { + lines.push(line) + }) + + rl.on('close', () => { + // Join the collected lines back into a single newline-delimited string + resolve(lines.join('\n')) + }) + + rl.on('error', (error) => { + reject(`Error reading file ${filePath}: ${error}`) + }) + }) +} + +const generateDateRange = (startDate: string, endDate: string) => { + const start = startDate.split('-') + const end = endDate.split('-') + const startYear = parseInt(start[0], 10) + const startMonth = parseInt(start[1], 10) - 1 // JS months are 0-indexed + const startDay = parseInt(start[2], 10) + const startHour = parseInt(start[3], 10) + + const endYear = parseInt(end[0], 10) + const endMonth = parseInt(end[1], 10) - 1 + const endDay = parseInt(end[2], 10) + const endHour = parseInt(end[3], 10) + + const result = [] + const startTime = new Date(startYear, startMonth, startDay, startHour) + const endTime = new Date(endYear, endMonth, endDay, endHour) + + for (let time = startTime; time <= endTime; time.setHours(time.getHours() + 1)) { + const year = time.getFullYear() + const month = (time.getMonth() + 1).toString().padStart(2, '0') + const day = time.getDate().toString().padStart(2, '0') + const hour = time.getHours().toString().padStart(2, '0') + result.push(`${year}-${month}-${day}-${hour}`) + } + + return result +} + +const getLogs = async (startDate?: string, endDate?: string) => { + if (!startDate || !endDate) { + throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error: logService.getLogs - No start date or end date provided`) + } + + if (startDate > endDate) { + throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error: logService.getLogs - Start date is greater than end date`) + } + + try { + const promises = [] + const files = generateDateRange(startDate, endDate) + + for (let i = 0; i < files.length; i++) { + const date = files[i] + const filePath = process.env.LOG_PATH + ?
path.resolve(process.env.LOG_PATH, `server.log.${date}`) + : path.join(__dirname, '..', '..', '..', 'logs', `server.log.${date}`) + if (fs.existsSync(filePath)) { + promises.push(readFile(filePath)) + } else { + // console.error(`File ${filePath} not found`) + } + + if (i === files.length - 1) { + const results = await Promise.all(promises) + return results + } + } + } catch (error) { + throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error: logService.getLogs - ${getErrorMessage(error)}`) + } +} + +export default { + getLogs +} diff --git a/packages/server/src/services/marketplaces/index.ts b/packages/server/src/services/marketplaces/index.ts index 33bc54920a5..bea25d4f8b5 100644 --- a/packages/server/src/services/marketplaces/index.ts +++ b/packages/server/src/services/marketplaces/index.ts @@ -8,8 +8,9 @@ import { getRunningExpressApp } from '../../utils/getRunningExpressApp' import { DeleteResult } from 'typeorm' import { CustomTemplate } from '../../database/entities/CustomTemplate' import { v4 as uuidv4 } from 'uuid' - import chatflowsService from '../chatflows' +import { getWorkspaceSearchOptions } from '../../enterprise/utils/ControllerServiceUtils' +import { WorkspaceService } from '../../enterprise/services/workspace.service' type ITemplate = { badge: string @@ -134,30 +135,50 @@ const deleteCustomTemplate = async (templateId: string): Promise = } } -const getAllCustomTemplates = async (): Promise => { +const _modifyTemplates = (templates: any[]) => { + templates.map((template) => { + template.usecases = template.usecases ? JSON.parse(template.usecases) : '' + if (template.type === 'Tool') { + template.flowData = JSON.parse(template.flowData) + template.iconSrc = template.flowData.iconSrc + template.schema = template.flowData.schema + template.func = template.flowData.func + template.categories = [] + template.flowData = undefined + } else { + template.categories = getCategories(JSON.parse(template.flowData)) + } + if (!template.badge) { + template.badge = '' + } + if (!template.framework) { + template.framework = '' + } + }) +} + +const getAllCustomTemplates = async (workspaceId?: string): Promise => { try { const appServer = getRunningExpressApp() - const templates: any[] = await appServer.AppDataSource.getRepository(CustomTemplate).find() - templates.map((template) => { - template.usecases = template.usecases ? 
JSON.parse(template.usecases) : '' - if (template.type === 'Tool') { - template.flowData = JSON.parse(template.flowData) - template.iconSrc = template.flowData.iconSrc - template.schema = template.flowData.schema - template.func = template.flowData.func - template.categories = [] - template.flowData = undefined - } else { - template.categories = getCategories(JSON.parse(template.flowData)) + const templates: any[] = await appServer.AppDataSource.getRepository(CustomTemplate).findBy(getWorkspaceSearchOptions(workspaceId)) + const dbResponse = [] + _modifyTemplates(templates) + dbResponse.push(...templates) + // get shared credentials + if (workspaceId) { + const workspaceService = new WorkspaceService() + const sharedItems = (await workspaceService.getSharedItemsForWorkspace(workspaceId, 'custom_template')) as CustomTemplate[] + if (sharedItems && sharedItems.length) { + _modifyTemplates(sharedItems) + // add shared = true flag to all shared items, to differentiate them in the UI + sharedItems.forEach((sharedItem) => { + // @ts-ignore + sharedItem.shared = true + dbResponse.push(sharedItem) + }) } - if (!template.badge) { - template.badge = '' - } - if (!template.framework) { - template.framework = '' - } - }) - return templates + } + return dbResponse } catch (error) { throw new InternalFlowiseError( StatusCodes.INTERNAL_SERVER_ERROR, diff --git a/packages/server/src/services/nodes/index.ts b/packages/server/src/services/nodes/index.ts index a55932d7659..a709a8bd2af 100644 --- a/packages/server/src/services/nodes/index.ts +++ b/packages/server/src/services/nodes/index.ts @@ -1,12 +1,14 @@ -import { cloneDeep } from 'lodash' +import { cloneDeep, omit } from 'lodash' import { StatusCodes } from 'http-status-codes' import { getRunningExpressApp } from '../../utils/getRunningExpressApp' -import { INodeData } from '../../Interface' -import { INodeOptionsValue, ICommonObject, handleEscapeCharacters } from 'flowise-components' +import { INodeData, MODE } from '../../Interface' +import { INodeOptionsValue } from 'flowise-components' import { databaseEntities } from '../../utils' import logger from '../../utils/logger' import { InternalFlowiseError } from '../../errors/internalFlowiseError' import { getErrorMessage } from '../../errors/utils' +import { OMIT_QUEUE_JOB_DATA } from '../../utils/constants' +import { executeCustomNodeFunction } from '../../utils/executeCustomNodeFunction' // Get all component nodes const getAllNodes = async () => { @@ -100,7 +102,8 @@ const getSingleNodeAsyncOptions = async (nodeName: string, requestBody: any): Pr databaseEntities: databaseEntities, componentNodes: appServer.nodesPool.componentNodes, previousNodes: requestBody.previousNodes, - currentNode: requestBody.currentNode + currentNode: requestBody.currentNode, + searchOptions: requestBody.searchOptions }) return dbResponse @@ -119,48 +122,31 @@ const getSingleNodeAsyncOptions = async (nodeName: string, requestBody: any): Pr } // execute custom function node -const executeCustomFunction = async (requestBody: any) => { - try { - const appServer = getRunningExpressApp() - const body = requestBody - const functionInputVariables = Object.fromEntries( - [...(body?.javascriptFunction ?? 
'').matchAll(/\$([a-zA-Z0-9_]+)/g)].map((g) => [g[1], undefined]) - ) - if (functionInputVariables && Object.keys(functionInputVariables).length) { - for (const key in functionInputVariables) { - if (key.includes('vars')) { - delete functionInputVariables[key] - } - } - } - const nodeData = { inputs: { functionInputVariables, ...body } } - if (Object.prototype.hasOwnProperty.call(appServer.nodesPool.componentNodes, 'customFunction')) { - try { - const nodeInstanceFilePath = appServer.nodesPool.componentNodes['customFunction'].filePath as string - const nodeModule = await import(nodeInstanceFilePath) - const newNodeInstance = new nodeModule.nodeClass() +const executeCustomFunction = async (requestBody: any, orgId?: string) => { + const appServer = getRunningExpressApp() + const executeData = { + appDataSource: appServer.AppDataSource, + componentNodes: appServer.nodesPool.componentNodes, + data: requestBody, + isExecuteCustomFunction: true, + orgId + } - const options: ICommonObject = { - appDataSource: appServer.AppDataSource, - databaseEntities, - logger - } + if (process.env.MODE === MODE.QUEUE) { + const predictionQueue = appServer.queueManager.getQueue('prediction') - const returnData = await newNodeInstance.init(nodeData, '', options) - const dbResponse = typeof returnData === 'string' ? handleEscapeCharacters(returnData, true) : returnData + const job = await predictionQueue.addJob(omit(executeData, OMIT_QUEUE_JOB_DATA)) + logger.debug(`[server]: Execute Custom Function Job added to queue by ${orgId}: ${job.id}`) - return dbResponse - } catch (error) { - throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error running custom function: ${error}`) - } - } else { - throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Node customFunction not found`) + const queueEvents = predictionQueue.getQueueEvents() + const result = await job.waitUntilFinished(queueEvents) + if (!result) { + throw new Error('Failed to execute custom function') } - } catch (error) { - throw new InternalFlowiseError( - StatusCodes.INTERNAL_SERVER_ERROR, - `Error: nodesService.executeCustomFunction - ${getErrorMessage(error)}` - ) + + return result + } else { + return await executeCustomNodeFunction(executeData) } } diff --git a/packages/server/src/services/openai-realtime/index.ts b/packages/server/src/services/openai-realtime/index.ts index c1d9c6251fc..f16c96b1934 100644 --- a/packages/server/src/services/openai-realtime/index.ts +++ b/packages/server/src/services/openai-realtime/index.ts @@ -17,6 +17,9 @@ import { ICommonObject, INodeData } from 'flowise-components' import { convertToOpenAIFunction } from '@langchain/core/utils/function_calling' import { v4 as uuidv4 } from 'uuid' import { Variable } from '../../database/entities/Variable' +import { getWorkspaceSearchOptions } from '../../enterprise/utils/ControllerServiceUtils' +import { Workspace } from '../../enterprise/database/entities/workspace.entity' +import { Organization } from '../../enterprise/database/entities/organization.entity' const SOURCE_DOCUMENTS_PREFIX = '\n\n----FLOWISE_SOURCE_DOCUMENTS----\n\n' const ARTIFACTS_PREFIX = '\n\n----FLOWISE_ARTIFACTS----\n\n' @@ -60,9 +63,30 @@ const buildAndInitTool = async (chatflowid: string, _chatId?: string, _apiMessag } startingNodeIds = [...new Set(startingNodeIds)] - const availableVariables = await appServer.AppDataSource.getRepository(Variable).find() + /*** Get API Config ***/ + const availableVariables = await 
appServer.AppDataSource.getRepository(Variable).findBy(getWorkspaceSearchOptions(chatflow.workspaceId)) const { nodeOverrides, variableOverrides, apiOverrideStatus } = getAPIOverrideConfig(chatflow) + // This can be public API, so we can only get orgId from the chatflow + const chatflowWorkspaceId = chatflow.workspaceId + const workspace = await appServer.AppDataSource.getRepository(Workspace).findOneBy({ + id: chatflowWorkspaceId + }) + if (!workspace) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Workspace ${chatflowWorkspaceId} not found`) + } + const workspaceId = workspace.id + + const org = await appServer.AppDataSource.getRepository(Organization).findOneBy({ + id: workspace.organizationId + }) + if (!org) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Organization ${workspace.organizationId} not found`) + } + + const orgId = org.id + const subscriptionId = org.subscriptionId + const reactFlowNodes = await buildFlow({ startingNodeIds, reactFlowNodes: nodes, @@ -77,10 +101,15 @@ const buildAndInitTool = async (chatflowid: string, _chatId?: string, _apiMessag chatflowid, apiMessageId, appDataSource: appServer.AppDataSource, + usageCacheManager: appServer.usageCacheManager, + cachePool: appServer.cachePool, apiOverrideStatus, nodeOverrides, availableVariables, - variableOverrides + variableOverrides, + orgId, + workspaceId, + subscriptionId }) const nodeToExecute = diff --git a/packages/server/src/services/predictions/index.ts b/packages/server/src/services/predictions/index.ts index 6f2dbe199c1..5d1d71ec098 100644 --- a/packages/server/src/services/predictions/index.ts +++ b/packages/server/src/services/predictions/index.ts @@ -4,9 +4,9 @@ import { utilBuildChatflow } from '../../utils/buildChatflow' import { InternalFlowiseError } from '../../errors/internalFlowiseError' import { getErrorMessage } from '../../errors/utils' -const buildChatflow = async (fullRequest: Request) => { +const buildChatflow = async (req: Request) => { try { - const dbResponse = await utilBuildChatflow(fullRequest) + const dbResponse = await utilBuildChatflow(req) return dbResponse } catch (error) { throw new InternalFlowiseError( diff --git a/packages/server/src/services/settings/index.ts b/packages/server/src/services/settings/index.ts new file mode 100644 index 00000000000..18f73c8135e --- /dev/null +++ b/packages/server/src/services/settings/index.ts @@ -0,0 +1,33 @@ +// TODO: add settings + +import { Platform } from '../../Interface' +import { getRunningExpressApp } from '../../utils/getRunningExpressApp' + +const getSettings = async () => { + try { + const appServer = getRunningExpressApp() + const platformType = appServer.identityManager.getPlatformType() + + switch (platformType) { + case Platform.ENTERPRISE: { + if (!appServer.identityManager.isLicenseValid()) { + return {} + } else { + return { PLATFORM_TYPE: Platform.ENTERPRISE } + } + } + case Platform.CLOUD: { + return { PLATFORM_TYPE: Platform.CLOUD } + } + default: { + return { PLATFORM_TYPE: Platform.OPEN_SOURCE } + } + } + } catch (error) { + return {} + } +} + +export default { + getSettings +} diff --git a/packages/server/src/services/tools/index.ts b/packages/server/src/services/tools/index.ts index 0dbf69b7ff1..c30a505018c 100644 --- a/packages/server/src/services/tools/index.ts +++ b/packages/server/src/services/tools/index.ts @@ -1,25 +1,30 @@ import { StatusCodes } from 'http-status-codes' import { Tool } from '../../database/entities/Tool' +import { getAppVersion } from '../../utils' import { 
InternalFlowiseError } from '../../errors/internalFlowiseError' import { getErrorMessage } from '../../errors/utils' -import { getAppVersion } from '../../utils' +import { getWorkspaceSearchOptions } from '../../enterprise/utils/ControllerServiceUtils' import { getRunningExpressApp } from '../../utils/getRunningExpressApp' import { FLOWISE_METRIC_COUNTERS, FLOWISE_COUNTER_STATUS } from '../../Interface.Metrics' import { QueryRunner } from 'typeorm' import { validate } from 'uuid' -const createTool = async (requestBody: any): Promise => { +const createTool = async (requestBody: any, orgId: string): Promise => { try { const appServer = getRunningExpressApp() const newTool = new Tool() Object.assign(newTool, requestBody) const tool = await appServer.AppDataSource.getRepository(Tool).create(newTool) const dbResponse = await appServer.AppDataSource.getRepository(Tool).save(tool) - await appServer.telemetry.sendTelemetry('tool_created', { - version: await getAppVersion(), - toolId: dbResponse.id, - toolName: dbResponse.name - }) + await appServer.telemetry.sendTelemetry( + 'tool_created', + { + version: await getAppVersion(), + toolId: dbResponse.id, + toolName: dbResponse.name + }, + orgId + ) appServer.metricsProvider?.incrementCounter(FLOWISE_METRIC_COUNTERS.TOOL_CREATED, { status: FLOWISE_COUNTER_STATUS.SUCCESS }) return dbResponse } catch (error) { @@ -39,10 +44,10 @@ const deleteTool = async (toolId: string): Promise => { } } -const getAllTools = async (): Promise => { +const getAllTools = async (workspaceId?: string): Promise => { try { const appServer = getRunningExpressApp() - const dbResponse = await appServer.AppDataSource.getRepository(Tool).find() + const dbResponse = await appServer.AppDataSource.getRepository(Tool).findBy(getWorkspaceSearchOptions(workspaceId)) return dbResponse } catch (error) { throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error: toolsService.getAllTools - ${getErrorMessage(error)}`) @@ -75,7 +80,7 @@ const updateTool = async (toolId: string, toolBody: any): Promise => { } const updateTool = new Tool() Object.assign(updateTool, toolBody) - await appServer.AppDataSource.getRepository(Tool).merge(tool, updateTool) + appServer.AppDataSource.getRepository(Tool).merge(tool, updateTool) const dbResponse = await appServer.AppDataSource.getRepository(Tool).save(tool) return dbResponse } catch (error) { diff --git a/packages/server/src/services/validation/index.ts b/packages/server/src/services/validation/index.ts index 5ac4ea5286a..85cde6467d0 100644 --- a/packages/server/src/services/validation/index.ts +++ b/packages/server/src/services/validation/index.ts @@ -13,16 +13,18 @@ interface IValidationResult { issues: string[] } -const checkFlowValidation = async (flowId: string): Promise => { +const checkFlowValidation = async (flowId: string, workspaceId?: string): Promise => { try { const appServer = getRunningExpressApp() const componentNodes = appServer.nodesPool.componentNodes + // Create query conditions with workspace filtering if provided + const whereCondition: any = { id: flowId } + if (workspaceId) whereCondition.workspaceId = workspaceId + const flow = await appServer.AppDataSource.getRepository(ChatFlow).findOne({ - where: { - id: flowId - } + where: whereCondition }) if (!flow) { diff --git a/packages/server/src/services/variables/index.ts b/packages/server/src/services/variables/index.ts index d06e8c6c778..dcccddb6bf4 100644 --- a/packages/server/src/services/variables/index.ts +++ b/packages/server/src/services/variables/index.ts @@ 
-3,14 +3,25 @@ import { getRunningExpressApp } from '../../utils/getRunningExpressApp' import { Variable } from '../../database/entities/Variable' import { InternalFlowiseError } from '../../errors/internalFlowiseError' import { getErrorMessage } from '../../errors/utils' +import { getAppVersion } from '../../utils' +import { getWorkspaceSearchOptions } from '../../enterprise/utils/ControllerServiceUtils' import { QueryRunner } from 'typeorm' import { validate } from 'uuid' -const createVariable = async (newVariable: Variable) => { +const createVariable = async (newVariable: Variable, orgId: string) => { try { const appServer = getRunningExpressApp() + const variable = await appServer.AppDataSource.getRepository(Variable).create(newVariable) const dbResponse = await appServer.AppDataSource.getRepository(Variable).save(variable) + await appServer.telemetry.sendTelemetry( + 'variable_created', + { + version: await getAppVersion(), + variableType: variable.type + }, + orgId + ) return dbResponse } catch (error) { throw new InternalFlowiseError( @@ -33,10 +44,10 @@ const deleteVariable = async (variableId: string): Promise => { } } -const getAllVariables = async () => { +const getAllVariables = async (workspaceId?: string) => { try { const appServer = getRunningExpressApp() - const dbResponse = await appServer.AppDataSource.getRepository(Variable).find() + const dbResponse = await appServer.AppDataSource.getRepository(Variable).findBy(getWorkspaceSearchOptions(workspaceId)) return dbResponse } catch (error) { throw new InternalFlowiseError( diff --git a/packages/server/src/utils/addChatflowsCount.ts b/packages/server/src/utils/addChatflowsCount.ts index ede05121cf4..3b8d4c069fa 100644 --- a/packages/server/src/utils/addChatflowsCount.ts +++ b/packages/server/src/utils/addChatflowsCount.ts @@ -15,6 +15,7 @@ export const addChatflowsCount = async (keys: any) => { const chatflows = await appServer.AppDataSource.getRepository(ChatFlow) .createQueryBuilder('cf') .where('cf.apikeyid = :apikeyid', { apikeyid: key.id }) + .andWhere('cf.workspaceId = :workspaceId', { workspaceId: key.workspaceId }) .getMany() const linkedChatFlows: any[] = [] chatflows.map((cf) => { diff --git a/packages/server/src/utils/apiKey.ts b/packages/server/src/utils/apiKey.ts index a50b2b54a85..9aa5daa9b52 100644 --- a/packages/server/src/utils/apiKey.ts +++ b/packages/server/src/utils/apiKey.ts @@ -1,10 +1,14 @@ import { randomBytes, scryptSync, timingSafeEqual } from 'crypto' import { ICommonObject } from 'flowise-components' -import moment from 'moment' import fs from 'fs' import path from 'path' -import logger from './logger' -import { appConfig } from '../AppConfig' +import { DataSource } from 'typeorm' +import { ApiKey } from '../database/entities/ApiKey' +import { Workspace } from '../enterprise/database/entities/workspace.entity' +import { v4 as uuidv4 } from 'uuid' +import { ChatFlow } from '../database/entities/ChatFlow' +import { addChatflowsCount } from './addChatflowsCount' +import { Platform } from '../Interface' /** * Returns the api key path @@ -51,92 +55,12 @@ export const compareKeys = (storedKey: string, suppliedKey: string): boolean => * @returns {Promise} */ export const getAPIKeys = async (): Promise => { - if (appConfig.apiKeys.storageType !== 'json') { - return [] - } try { const content = await fs.promises.readFile(getAPIKeyPath(), 'utf8') return JSON.parse(content) } catch (error) { - const keyName = 'DefaultKey' - const apiKey = generateAPIKey() - const apiSecret = generateSecretHash(apiKey) - const 
content = [ - { - keyName, - apiKey, - apiSecret, - createdAt: moment().format('DD-MMM-YY'), - id: randomBytes(16).toString('hex') - } - ] - await fs.promises.writeFile(getAPIKeyPath(), JSON.stringify(content), 'utf8') - return content - } -} - -/** - * Add new API key - * @param {string} keyName - * @returns {Promise} - */ -export const addAPIKey = async (keyName: string): Promise => { - const existingAPIKeys = await getAPIKeys() - const apiKey = generateAPIKey() - const apiSecret = generateSecretHash(apiKey) - const content = [ - ...existingAPIKeys, - { - keyName, - apiKey, - apiSecret, - createdAt: moment().format('DD-MMM-YY'), - id: randomBytes(16).toString('hex') - } - ] - await fs.promises.writeFile(getAPIKeyPath(), JSON.stringify(content), 'utf8') - return content -} - -/** - * import API keys - * @param {[]} keys - * @returns {Promise} - */ -export const importKeys = async (keys: any[], importMode: string): Promise => { - const allApiKeys = await getAPIKeys() - // if importMode is errorIfExist, check for existing keys and raise error before any modification to the file - if (importMode === 'errorIfExist') { - for (const key of keys) { - const keyNameExists = allApiKeys.find((k) => k.keyName === key.keyName) - if (keyNameExists) { - throw new Error(`Key with name ${key.keyName} already exists`) - } - } - } - for (const key of keys) { - // Check if keyName already exists, if overwrite is false, raise an error else overwrite the key - const keyNameExists = allApiKeys.find((k) => k.keyName === key.keyName) - if (keyNameExists) { - const keyIndex = allApiKeys.findIndex((k) => k.keyName === key.keyName) - switch (importMode) { - case 'overwriteIfExist': - allApiKeys[keyIndex] = key - continue - case 'ignoreIfExist': - // ignore this key and continue - continue - case 'errorIfExist': - // should not reach here as we have already checked for existing keys - throw new Error(`Key with name ${key.keyName} already exists`) - default: - throw new Error(`Unknown overwrite option ${importMode}`) - } - } - allApiKeys.push(key) + return [] } - await fs.promises.writeFile(getAPIKeyPath(), JSON.stringify(allApiKeys), 'utf8') - return allApiKeys } /** @@ -151,42 +75,82 @@ export const getApiKey = async (apiKey: string) => { return existingAPIKeys[keyIndex] } -/** - * Update existing API key - * @param {string} keyIdToUpdate - * @param {string} newKeyName - * @returns {Promise} - */ -export const updateAPIKey = async (keyIdToUpdate: string, newKeyName: string): Promise => { - const existingAPIKeys = await getAPIKeys() - const keyIndex = existingAPIKeys.findIndex((key) => key.id === keyIdToUpdate) - if (keyIndex < 0) return [] - existingAPIKeys[keyIndex].keyName = newKeyName - await fs.promises.writeFile(getAPIKeyPath(), JSON.stringify(existingAPIKeys), 'utf8') - return existingAPIKeys -} +export const migrateApiKeysFromJsonToDb = async (appDataSource: DataSource, platformType: Platform) => { + if (platformType === Platform.CLOUD) { + return + } -/** - * Delete API key - * @param {string} keyIdToDelete - * @returns {Promise} - */ -export const deleteAPIKey = async (keyIdToDelete: string): Promise => { - const existingAPIKeys = await getAPIKeys() - const result = existingAPIKeys.filter((key) => key.id !== keyIdToDelete) - await fs.promises.writeFile(getAPIKeyPath(), JSON.stringify(result), 'utf8') - return result -} + if (!process.env.APIKEY_STORAGE_TYPE || process.env.APIKEY_STORAGE_TYPE === 'json') { + const keys = await getAPIKeys() + if (keys.length > 0) { + try { + // Get all available workspaces + 
const workspaces = await appDataSource.getRepository(Workspace).find() -/** - * Update existing API key - * @param {string} keyIdToUpdate - * @param {string} newKeyName - * @returns {Promise} - */ -export const updateAPIKey = async (keyIdToUpdate: string, newKeyName: string): Promise => { - const existingAPIKeys = await getAPIKeys() - const keyIndex = existingAPIKeys.findIndex((key) => key.id === keyIdToUpdate) - if (keyIndex < 0) return [] - existingAPIKeys[keyIndex].keyName = newKeyName - await fs.promises.writeFile(getAPIKeyPath(), JSON.stringify(existingAPIKeys), 'utf8') - return existingAPIKeys -} + for (const key of keys) { + const existingKey = await appDataSource.getRepository(ApiKey).findOneBy({ + apiKey: key.apiKey + }) + + // Only add if key doesn't already exist in DB + if (!existingKey) { + // Create a new API key for each workspace + if (workspaces.length > 0) { + for (const workspace of workspaces) { + const newKey = new ApiKey() + newKey.id = uuidv4() + newKey.apiKey = key.apiKey + newKey.apiSecret = key.apiSecret + newKey.keyName = key.keyName + newKey.workspaceId = workspace.id + + const keyEntity = appDataSource.getRepository(ApiKey).create(newKey) + await appDataSource.getRepository(ApiKey).save(keyEntity) + + const chatflows = await appDataSource.getRepository(ChatFlow).findBy({ + apikeyid: key.id, + workspaceId: workspace.id + }) + + for (const chatflow of chatflows) { + chatflow.apikeyid = newKey.id + await appDataSource.getRepository(ChatFlow).save(chatflow) + } + + await addChatflowsCount(chatflows) + } + } else { + // If no workspaces exist, create the key without a workspace ID; it will be updated later by setNullWorkspaceId + const newKey = new ApiKey() + newKey.id = uuidv4() + newKey.apiKey = key.apiKey + newKey.apiSecret = key.apiSecret + newKey.keyName = key.keyName + + const keyEntity = appDataSource.getRepository(ApiKey).create(newKey) + await appDataSource.getRepository(ApiKey).save(keyEntity) + + const chatflows = await appDataSource.getRepository(ChatFlow).findBy({ + apikeyid: key.id + }) + + for (const chatflow of chatflows) { + chatflow.apikeyid = newKey.id + await appDataSource.getRepository(ChatFlow).save(chatflow) + } + + await addChatflowsCount(chatflows) + } + } + } + + // Delete the JSON file + if (fs.existsSync(getAPIKeyPath())) { + fs.unlinkSync(getAPIKeyPath()) + } + } catch (error) { + console.error('Error migrating API keys from JSON to DB', error) + } + } } } diff --git a/packages/server/src/utils/buildAgentGraph.ts b/packages/server/src/utils/buildAgentGraph.ts index 9097a3e2bcb..e79cd6947cb 100644 --- a/packages/server/src/utils/buildAgentGraph.ts +++ b/packages/server/src/utils/buildAgentGraph.ts @@ -26,6 +26,7 @@ import { InternalFlowiseError } from '../errors/internalFlowiseError' import { getErrorMessage } from '../errors/utils' import logger from './logger' import { Variable } from '../database/entities/Variable' +import { getWorkspaceSearchOptions } from '../enterprise/utils/ControllerServiceUtils' import { DataSource } from 'typeorm' import { CachePool } from '../CachePool' @@ -50,7 +51,9 @@ export const buildAgentGraph = async ({ shouldStreamResponse, cachePool, baseURL, - signal + signal, + orgId, + workspaceId }: { agentflow: IChatFlow flowConfig: IFlowConfig @@ -70,6 +73,8 @@ export const buildAgentGraph = async ({ cachePool: CachePool baseURL: string signal?: AbortController + orgId: string + workspaceId?: string }): Promise => { try { const chatflowid = flowConfig.chatflowid @@ -79,6 +84,8 @@ export const buildAgentGraph = async ({ const uploads = incomingInput.uploads const options = { + orgId, + workspaceId, chatId, sessionId, chatflowid, @@ -384,7 +391,7 @@ export const buildAgentGraph = async ({ } } catch (e) { // clear agent memory because
checkpoints were saved during runtime - await clearSessionMemory(nodes, componentNodes, chatId, appDataSource, sessionId) + await clearSessionMemory(nodes, componentNodes, chatId, appDataSource, orgId, sessionId) if (getErrorMessage(e).includes('Aborted')) { if (shouldStreamResponse && sseStreamer) { sseStreamer.streamAbortEvent(chatId) @@ -395,7 +402,7 @@ export const buildAgentGraph = async ({ } return streamResults } catch (e) { - logger.error('[server]: Error:', e) + logger.error(`[server]: [${orgId}]: Error:`, e) throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error buildAgentGraph - ${getErrorMessage(e)}`) } } @@ -457,7 +464,7 @@ const compileMultiAgentsGraph = async (params: MultiAgentsGraphParams) => { const workerNodes = reactFlowNodes.filter((node) => workerNodeIds.includes(node.data.id)) /*** Get API Config ***/ - const availableVariables = await appDataSource.getRepository(Variable).find() + const availableVariables = await appDataSource.getRepository(Variable).findBy(getWorkspaceSearchOptions(agentflow.workspaceId)) const { nodeOverrides, variableOverrides, apiOverrideStatus } = getAPIOverrideConfig(agentflow) let supervisorWorkers: { [key: string]: IMultiAgentNode[] } = {} @@ -566,7 +573,7 @@ const compileMultiAgentsGraph = async (params: MultiAgentsGraphParams) => { const graph = workflowGraph.compile({ checkpointer: memory }) - const loggerHandler = new ConsoleCallbackHandler(logger) + const loggerHandler = new ConsoleCallbackHandler(logger, options?.orgId) const callbacks = await additionalCallbacks(flowNodeData, options) const config = { configurable: { thread_id: threadId } } @@ -686,7 +693,7 @@ const compileSeqAgentsGraph = async (params: SeqAgentsGraphParams) => { let interruptToolNodeNames = [] /*** Get API Config ***/ - const availableVariables = await appDataSource.getRepository(Variable).find() + const availableVariables = await appDataSource.getRepository(Variable).findBy(getWorkspaceSearchOptions(agentflow.workspaceId)) const { nodeOverrides, variableOverrides, apiOverrideStatus } = getAPIOverrideConfig(agentflow) const initiateNode = async (node: IReactFlowNode) => { @@ -996,7 +1003,7 @@ const compileSeqAgentsGraph = async (params: SeqAgentsGraphParams) => { interruptBefore: interruptToolNodeNames as any }) - const loggerHandler = new ConsoleCallbackHandler(logger) + const loggerHandler = new ConsoleCallbackHandler(logger, options?.orgId) const callbacks = await additionalCallbacks(flowNodeData as any, options) const config = { configurable: { thread_id: threadId }, bindModel } @@ -1044,7 +1051,7 @@ const compileSeqAgentsGraph = async (params: SeqAgentsGraphParams) => { configurable: config }) } catch (e) { - logger.error('Error compile graph', e) + logger.error(`[${options.orgId}]: Error compile graph`, e) throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error compile graph - ${getErrorMessage(e)}`) } } diff --git a/packages/server/src/utils/buildAgentflow.ts b/packages/server/src/utils/buildAgentflow.ts index 28c117057c6..4ce91285a57 100644 --- a/packages/server/src/utils/buildAgentflow.ts +++ b/packages/server/src/utils/buildAgentflow.ts @@ -52,6 +52,8 @@ import { utilAddChatMessage } from './addChatMesage' import { CachePool } from '../CachePool' import { ChatMessage } from '../database/entities/ChatMessage' import { Telemetry } from './telemetry' +import { getWorkspaceSearchOptions } from '../enterprise/utils/ControllerServiceUtils' +import { UsageCacheManager } from '../UsageCacheManager' interface IWaitingNode { 
nodeId: string @@ -99,9 +101,11 @@ interface IExecuteNodeParams { chatId: string sessionId: string apiMessageId: string + evaluationRunId?: string isInternal: boolean pastChatHistory: IMessage[] appDataSource: DataSource + usageCacheManager: UsageCacheManager telemetry: Telemetry componentNodes: IComponentNodes cachePool: CachePool @@ -122,6 +126,9 @@ interface IExecuteNodeParams { parentExecutionId?: string isRecursive?: boolean iterationContext?: ICommonObject + orgId: string + workspaceId: string + subscriptionId: string } interface IExecuteAgentFlowParams extends Omit { @@ -142,13 +149,15 @@ const addExecution = async ( appDataSource: DataSource, agentflowId: string, agentFlowExecutedData: IAgentflowExecutedData[], - sessionId: string + sessionId: string, + workspaceId: string ) => { const newExecution = new Execution() const bodyExecution = { agentflowId, state: 'INPROGRESS', sessionId, + workspaceId, executionData: JSON.stringify(agentFlowExecutedData) } Object.assign(newExecution, bodyExecution) @@ -164,9 +173,10 @@ const addExecution = async ( * @param {Partial} data * @returns {Promise} */ -const updateExecution = async (appDataSource: DataSource, executionId: string, data?: Partial) => { +const updateExecution = async (appDataSource: DataSource, executionId: string, workspaceId: string, data?: Partial) => { const execution = await appDataSource.getRepository(Execution).findOneBy({ - id: executionId + id: executionId, + workspaceId }) if (!execution) { @@ -325,6 +335,30 @@ export const resolveVariables = async ( } } + // Check if the variable is an output reference like `nodeId.output.path` + const outputMatch = variableFullPath.match(/^(.*?)\.output\.(.+)$/) + if (outputMatch && agentFlowExecutedData) { + // Extract nodeId and outputPath from the match + const [, nodeIdPart, outputPath] = outputMatch + // Clean nodeId (handle escaped underscores) + const cleanNodeId = nodeIdPart.replace('\\', '') + // Find the last (most recent) matching node data instead of the first one + const nodeData = [...agentFlowExecutedData].reverse().find((d) => d.nodeId === cleanNodeId) + if (nodeData?.data?.output && outputPath.trim()) { + const variableValue = get(nodeData.data.output, outputPath) + if (variableValue !== undefined) { + // Replace the reference with actual value + const formattedValue = + Array.isArray(variableValue) || (typeof variableValue === 'object' && variableValue !== null) + ? JSON.stringify(variableValue) + : String(variableValue) + resolvedValue = resolvedValue.replace(match, formattedValue) + // Skip fallback logic + continue + } + } + } + // Find node data in executed data // sometimes turndown value returns a backslash like `llmAgentflow\_1`, remove the backslash const cleanNodeId = variableFullPath.replace('\\', '') @@ -334,7 +368,8 @@ export const resolveVariables = async ( : undefined if (nodeData && nodeData.data) { // Replace the reference with actual value - const actualValue = (nodeData.data['output'] as ICommonObject)?.content + const nodeOutput = nodeData.data['output'] as ICommonObject + const actualValue = nodeOutput?.content ?? 
nodeOutput?.http?.data // For arrays and objects, stringify them to prevent toString() conversion issues const formattedValue = Array.isArray(actualValue) || (typeof actualValue === 'object' && actualValue !== null) @@ -770,9 +805,11 @@ const executeNode = async ({ chatId, sessionId, apiMessageId, + evaluationRunId, parentExecutionId, pastChatHistory, appDataSource, + usageCacheManager, telemetry, componentNodes, cachePool, @@ -792,7 +829,10 @@ const executeNode = async ({ analyticHandlers, isInternal, isRecursive, - iterationContext + iterationContext, + orgId, + workspaceId, + subscriptionId }: IExecuteNodeParams): Promise<{ result: any shouldStop?: boolean @@ -824,7 +864,7 @@ const executeNode = async ({ } // Get available variables and resolve them - const availableVariables = await appDataSource.getRepository(Variable).find() + const availableVariables = await appDataSource.getRepository(Variable).findBy(getWorkspaceSearchOptions(workspaceId)) // Prepare flow config let updatedState = cloneDeep(agentflowRuntime.state) @@ -902,6 +942,9 @@ const executeNode = async ({ // Prepare run parameters const runParams = { + orgId, + workspaceId, + subscriptionId, chatId, sessionId, chatflowid: chatflow.id, @@ -909,6 +952,7 @@ const executeNode = async ({ logger, appDataSource, databaseEntities, + usageCacheManager, componentNodes, cachePool, analytic: chatflow.analytic, @@ -922,7 +966,8 @@ const executeNode = async ({ analyticHandlers, parentTraceIds, humanInputAction, - iterationContext + iterationContext, + evaluationRunId } // Execute node @@ -982,7 +1027,9 @@ const executeNode = async ({ incomingInput, chatflow: iterationChatflow, chatId, + evaluationRunId, appDataSource, + usageCacheManager, telemetry, cachePool, sseStreamer, @@ -996,7 +1043,10 @@ const executeNode = async ({ iterationContext: { ...iterationContext, agentflowRuntime - } + }, + orgId, + workspaceId, + subscriptionId }) // Store the result @@ -1023,7 +1073,7 @@ const executeNode = async ({ if (parentExecutionId) { try { logger.debug(` 📝 Updating parent execution ${parentExecutionId} with iteration ${i + 1} data`) - await updateExecution(appDataSource, parentExecutionId, { + await updateExecution(appDataSource, parentExecutionId, workspaceId, { executionData: JSON.stringify(agentFlowExecutedData) }) } catch (error) { @@ -1192,8 +1242,10 @@ export const executeAgentFlow = async ({ incomingInput, chatflow, chatId, + evaluationRunId, appDataSource, telemetry, + usageCacheManager, cachePool, sseStreamer, baseURL, @@ -1204,7 +1256,10 @@ export const executeAgentFlow = async ({ isRecursive = false, parentExecutionId, iterationContext, - isTool = false + isTool = false, + orgId, + workspaceId, + subscriptionId }: IExecuteAgentFlowParams) => { logger.debug('\n🚀 Starting flow execution') @@ -1281,7 +1336,8 @@ export const executeAgentFlow = async ({ const previousExecutions = await appDataSource.getRepository(Execution).find({ where: { sessionId, - agentflowId: chatflowid + agentflowId: chatflowid, + workspaceId }, order: { createdDate: 'DESC' @@ -1293,6 +1349,24 @@ export const executeAgentFlow = async ({ } } + // If the state is persistent, get the state from the previous execution + const startPersistState = nodes.find((node) => node.data.name === 'startAgentflow')?.data.inputs?.startPersistState + if (startPersistState === true && previousExecution) { + const previousExecutionData = (JSON.parse(previousExecution.executionData) as IAgentflowExecutedData[]) ?? 
[] + + let previousState = {} + if (Array.isArray(previousExecutionData) && previousExecutionData.length) { + for (const execData of previousExecutionData.reverse()) { + if (execData.data.state) { + previousState = execData.data.state + break + } + } + } + + agentflowRuntime.state = previousState + } + // If the start input type is form input, get the form values from the previous execution (form values are persisted in the same session) if (startInputType === 'formInput' && previousExecution) { const previousExecutionData = (JSON.parse(previousExecution.executionData) as IAgentflowExecutedData[]) ?? [] @@ -1344,7 +1418,7 @@ export const executeAgentFlow = async ({ agentflowRuntime.state = (lastState as ICommonObject) ?? {} // Update execution state to INPROGRESS - await updateExecution(appDataSource, previousExecution.id, { + await updateExecution(appDataSource, previousExecution.id, workspaceId, { state: 'INPROGRESS' }) newExecution = previousExecution @@ -1357,7 +1431,7 @@ export const executeAgentFlow = async ({ // For recursive calls with a valid parent execution ID, don't create a new execution // Instead, fetch the parent execution to use it const parentExecution = await appDataSource.getRepository(Execution).findOne({ - where: { id: parentExecutionId } + where: { id: parentExecutionId, workspaceId } }) if (parentExecution) { @@ -1365,7 +1439,7 @@ export const executeAgentFlow = async ({ newExecution = parentExecution } else { console.warn(` ⚠️ Parent execution ID ${parentExecutionId} not found, will create new execution`) - newExecution = await addExecution(appDataSource, chatflowid, agentFlowExecutedData, sessionId) + newExecution = await addExecution(appDataSource, chatflowid, agentFlowExecutedData, sessionId, workspaceId) parentExecutionId = newExecution.id } } else { @@ -1374,7 +1448,7 @@ export const executeAgentFlow = async ({ checkForMultipleStartNodes(startingNodeIds, isRecursive, nodes) // Only create a new execution if this is not a recursive call - newExecution = await addExecution(appDataSource, chatflowid, agentFlowExecutedData, sessionId) + newExecution = await addExecution(appDataSource, chatflowid, agentFlowExecutedData, sessionId, workspaceId) parentExecutionId = newExecution.id } @@ -1430,6 +1504,8 @@ export const executeAgentFlow = async ({ try { if (chatflow.analytic) { analyticHandlers = AnalyticHandler.getInstance({ inputs: {} } as any, { + orgId, + workspaceId, appDataSource, databaseEntities, componentNodes, @@ -1486,10 +1562,12 @@ export const executeAgentFlow = async ({ chatId, sessionId, apiMessageId, + evaluationRunId, parentExecutionId, isInternal, pastChatHistory, appDataSource, + usageCacheManager, telemetry, componentNodes, cachePool, @@ -1508,7 +1586,10 @@ export const executeAgentFlow = async ({ parentTraceIds, analyticHandlers, isRecursive, - iterationContext + iterationContext, + orgId, + workspaceId, + subscriptionId }) if (executionResult.agentFlowExecutedData) { @@ -1607,7 +1688,7 @@ export const executeAgentFlow = async ({ if (!isRecursive) { sseStreamer?.streamAgentFlowExecutedDataEvent(chatId, agentFlowExecutedData) - await updateExecution(appDataSource, newExecution.id, { + await updateExecution(appDataSource, newExecution.id, workspaceId, { executionData: JSON.stringify(agentFlowExecutedData), state: errorStatus }) @@ -1642,7 +1723,7 @@ export const executeAgentFlow = async ({ // Only update execution record if this is not a recursive call if (!isRecursive) { - await updateExecution(appDataSource, newExecution.id, { + await 
updateExecution(appDataSource, newExecution.id, workspaceId, { executionData: JSON.stringify(agentFlowExecutedData), state: status }) @@ -1744,6 +1825,8 @@ export const executeAgentFlow = async ({ if (chatflow.followUpPrompts) { const followUpPromptsConfig = JSON.parse(chatflow.followUpPrompts) const followUpPrompts = await generateFollowUpPrompts(followUpPromptsConfig, apiMessage.content, { + orgId, + workspaceId, chatId, chatflowid, appDataSource, @@ -1760,13 +1843,17 @@ export const executeAgentFlow = async ({ logger.debug(`[server]: Finished running agentflow ${chatflowid}`) - await telemetry.sendTelemetry('prediction_sent', { - version: await getAppVersion(), - chatflowId: chatflowid, - chatId, - type: isInternal ? ChatType.INTERNAL : ChatType.EXTERNAL, - flowGraph: getTelemetryFlowObj(nodes, edges) - }) + await telemetry.sendTelemetry( + 'prediction_sent', + { + version: await getAppVersion(), + chatflowId: chatflowid, + chatId, + type: isInternal ? ChatType.INTERNAL : ChatType.EXTERNAL, + flowGraph: getTelemetryFlowObj(nodes, edges) + }, + orgId + ) /*** Prepare response ***/ let result: ICommonObject = {} diff --git a/packages/server/src/utils/buildChatflow.ts b/packages/server/src/utils/buildChatflow.ts index d0b5e0fbef0..b138922ea76 100644 --- a/packages/server/src/utils/buildChatflow.ts +++ b/packages/server/src/utils/buildChatflow.ts @@ -15,6 +15,7 @@ import { mapExtToInputField, getFileFromUpload, removeSpecificFileFromUpload, + EvaluationRunner, handleEscapeCharacters } from 'flowise-components' import { StatusCodes } from 'http-status-codes' @@ -22,8 +23,8 @@ import { IncomingInput, IMessage, INodeData, - IReactFlowObject, IReactFlowNode, + IReactFlowObject, IDepthQueue, ChatType, IChatMessage, @@ -59,11 +60,15 @@ import { import { validateChatflowAPIKey } from './validateKey' import logger from './logger' import { utilAddChatMessage } from './addChatMesage' +import { checkPredictions, checkStorage, updatePredictionsUsage, updateStorageUsage } from './quotaUsage' import { buildAgentGraph } from './buildAgentGraph' import { getErrorMessage } from '../errors/utils' import { FLOWISE_METRIC_COUNTERS, FLOWISE_COUNTER_STATUS, IMetricsProvider } from '../Interface.Metrics' +import { getWorkspaceSearchOptions } from '../enterprise/utils/ControllerServiceUtils' import { OMIT_QUEUE_JOB_DATA } from './constants' import { executeAgentFlow } from './buildAgentflow' +import { Workspace } from '../enterprise/database/entities/workspace.entity' +import { Organization } from '../enterprise/database/entities/organization.entity' /* * Initialize the ending node to be executed @@ -230,15 +235,21 @@ export const executeFlow = async ({ incomingInput, chatflow, chatId, + isEvaluation, + evaluationRunId, appDataSource, telemetry, cachePool, + usageCacheManager, sseStreamer, baseURL, isInternal, files, signal, - isTool + isTool, + orgId, + workspaceId, + subscriptionId }: IExecuteFlowParams) => { // Ensure incomingInput has all required properties with default values incomingInput = { @@ -265,6 +276,8 @@ export const executeFlow = async ({ if (uploads) { fileUploads = uploads for (let i = 0; i < fileUploads.length; i += 1) { + await checkStorage(orgId, subscriptionId, usageCacheManager) + const upload = fileUploads[i] // if upload in an image, a rag file, or audio @@ -273,7 +286,8 @@ export const executeFlow = async ({ const splitDataURI = upload.data.split(',') const bf = Buffer.from(splitDataURI.pop() || '', 'base64') const mime = splitDataURI[0].split(':')[1].split(';')[0] - await 
addSingleFileToStorage(mime, bf, filename, chatflowid, chatId) + const { totalSize } = await addSingleFileToStorage(mime, bf, filename, orgId, chatflowid, chatId) + await updateStorageUsage(orgId, workspaceId, totalSize, usageCacheManager) upload.type = 'stored-file' // Omit upload.data since we don't store the content in database fileUploads[i] = omit(upload, ['data']) @@ -287,7 +301,7 @@ export const executeFlow = async ({ // Run Speech to Text conversion if (upload.mime === 'audio/webm' || upload.mime === 'audio/mp4' || upload.mime === 'audio/ogg') { - logger.debug(`Attempting a speech to text conversion...`) + logger.debug(`[server]: [${orgId}]: Attempting a speech to text conversion...`) let speechToTextConfig: ICommonObject = {} if (chatflow.speechToText) { const speechToTextProviders = JSON.parse(chatflow.speechToText) @@ -302,13 +316,14 @@ export const executeFlow = async ({ } if (speechToTextConfig) { const options: ICommonObject = { + orgId, chatId, chatflowid, appDataSource, databaseEntities: databaseEntities } const speechToTextResult = await convertSpeechToText(upload, speechToTextConfig, options) - logger.debug(`Speech to text result: ${speechToTextResult}`) + logger.debug(`[server]: [${orgId}]: Speech to text result: ${speechToTextResult}`) if (speechToTextResult) { incomingInput.question = speechToTextResult question = speechToTextResult @@ -329,11 +344,21 @@ export const executeFlow = async ({ if (files?.length) { overrideConfig = { ...incomingInput } for (const file of files) { + await checkStorage(orgId, subscriptionId, usageCacheManager) + const fileNames: string[] = [] const fileBuffer = await getFileFromUpload(file.path ?? file.key) // Address file name with special characters: https://github.com/expressjs/multer/issues/1104 file.originalname = Buffer.from(file.originalname, 'latin1').toString('utf8') - const storagePath = await addArrayFilesToStorage(file.mimetype, fileBuffer, file.originalname, fileNames, chatflowid) + const { path: storagePath, totalSize } = await addArrayFilesToStorage( + file.mimetype, + fileBuffer, + file.originalname, + fileNames, + orgId, + chatflowid + ) + await updateStorageUsage(orgId, workspaceId, totalSize, usageCacheManager) const fileInputFieldFromMimeType = mapMimeTypeToInputField(file.mimetype) @@ -382,16 +407,21 @@ export const executeFlow = async ({ incomingInput, chatflow, chatId, + evaluationRunId, appDataSource, telemetry, cachePool, + usageCacheManager, sseStreamer, baseURL, isInternal, uploadedFilesContent, fileUploads, signal, - isTool + isTool, + orgId, + workspaceId, + subscriptionId }) } @@ -443,7 +473,7 @@ export const executeFlow = async ({ }) /*** Get API Config ***/ - const availableVariables = await appDataSource.getRepository(Variable).find() + const availableVariables = await appDataSource.getRepository(Variable).findBy(getWorkspaceSearchOptions(workspaceId)) const { nodeOverrides, variableOverrides, apiOverrideStatus } = getAPIOverrideConfig(chatflow) const flowConfig: IFlowConfig = { @@ -455,7 +485,7 @@ export const executeFlow = async ({ ...incomingInput.overrideConfig } - logger.debug(`[server]: Start building flow ${chatflowid}`) + logger.debug(`[server]: [${orgId}]: Start building flow ${chatflowid}`) /*** BFS to traverse from Starting Nodes to Ending Node ***/ const reactFlowNodes = await buildFlow({ @@ -479,9 +509,13 @@ export const executeFlow = async ({ availableVariables, variableOverrides, cachePool, + usageCacheManager, isUpsert: false, uploads, - baseURL + baseURL, + orgId, + workspaceId, + 
subscriptionId }) const setVariableNodesOutput = getSetVariableNodesOutput(reactFlowNodes) @@ -506,7 +540,9 @@ export const executeFlow = async ({ shouldStreamResponse: true, // agentflow is always streamed cachePool, baseURL, - signal + signal, + orgId, + workspaceId }) if (streamResults) { @@ -556,13 +592,17 @@ export const executeFlow = async ({ } const chatMessage = await utilAddChatMessage(apiMessage, appDataSource) - await telemetry.sendTelemetry('agentflow_prediction_sent', { - version: await getAppVersion(), - agentflowId: agentflow.id, - chatId, - type: isInternal ? ChatType.INTERNAL : ChatType.EXTERNAL, - flowGraph: getTelemetryFlowObj(nodes, edges) - }) + await telemetry.sendTelemetry( + 'agentflow_prediction_sent', + { + version: await getAppVersion(), + agentflowId: agentflow.id, + chatId, + type: isInternal ? ChatType.INTERNAL : ChatType.EXTERNAL, + flowGraph: getTelemetryFlowObj(nodes, edges) + }, + orgId + ) // Find the previous chat message with the same action id and remove the action if (incomingInput.action && Object.keys(incomingInput.action).length) { @@ -596,6 +636,7 @@ export const executeFlow = async ({ // Prepare response let result: ICommonObject = {} result.text = finalResult + result.question = incomingInput.question result.chatId = chatId result.chatMessageId = chatMessage?.id @@ -605,7 +646,6 @@ export const executeFlow = async ({ if (finalAction && Object.keys(finalAction).length) result.action = finalAction if (Object.keys(setVariableNodesOutput).length) result.flowVariables = setVariableNodesOutput result.followUpPrompts = JSON.stringify(apiMessage.followUpPrompts) - return result } return undefined @@ -643,16 +683,23 @@ export const executeFlow = async ({ /*** Prepare run params ***/ const runParams = { + orgId, + workspaceId, + subscriptionId, chatId, chatflowid, apiMessageId, logger, appDataSource, databaseEntities, + usageCacheManager, analytic: chatflow.analytic, uploads, prependMessages, - ...(isStreamValid && { sseStreamer, shouldStreamResponse: isStreamValid }) + ...(isStreamValid && { sseStreamer, shouldStreamResponse: isStreamValid }), + evaluationRunId, + updateStorageUsage, + checkStorage } /*** Run the ending node ***/ @@ -669,7 +716,7 @@ export const executeFlow = async ({ role: 'userMessage', content: question, chatflowid, - chatType: isInternal ? ChatType.INTERNAL : ChatType.EXTERNAL, + chatType: isEvaluation ? ChatType.EVALUATION : isInternal ? ChatType.INTERNAL : ChatType.EXTERNAL, chatId, memoryType, sessionId, @@ -725,7 +772,7 @@ export const executeFlow = async ({ role: 'apiMessage', content: resultText, chatflowid, - chatType: isInternal ? ChatType.INTERNAL : ChatType.EXTERNAL, + chatType: isEvaluation ? ChatType.EVALUATION : isInternal ? ChatType.INTERNAL : ChatType.EXTERNAL, chatId, memoryType, sessionId @@ -749,15 +796,22 @@ export const executeFlow = async ({ const chatMessage = await utilAddChatMessage(apiMessage, appDataSource) - logger.debug(`[server]: Finished running ${endingNodeData.label} (${endingNodeData.id})`) - - await telemetry.sendTelemetry('prediction_sent', { - version: await getAppVersion(), - chatflowId: chatflowid, - chatId, - type: isInternal ? 
ChatType.INTERNAL : ChatType.EXTERNAL, - flowGraph: getTelemetryFlowObj(nodes, edges) - }) + logger.debug(`[server]: [${orgId}]: Finished running ${endingNodeData.label} (${endingNodeData.id})`) + if (evaluationRunId) { + const metrics = await EvaluationRunner.getAndDeleteMetrics(evaluationRunId) + result.metrics = metrics + } + await telemetry.sendTelemetry( + 'prediction_sent', + { + version: await getAppVersion(), + chatflowId: chatflowid, + chatId, + type: isInternal ? ChatType.INTERNAL : ChatType.EXTERNAL, + flowGraph: getTelemetryFlowObj(nodes, edges) + }, + orgId + ) /*** Prepare response ***/ result.question = incomingInput.question // return the question in the response, this is used when input text is empty but question is in audio format @@ -830,6 +884,7 @@ const checkIfStreamValid = async ( */ export const utilBuildChatflow = async (req: Request, isInternal: boolean = false): Promise => { const appServer = getRunningExpressApp() + const chatflowid = req.params.id // Check if chatflow exists @@ -841,7 +896,6 @@ export const utilBuildChatflow = async (req: Request, isInternal: boolean = fals } const isAgentFlow = chatflow.type === 'MULTIAGENT' - const httpProtocol = req.get('x-forwarded-proto') || req.protocol const baseURL = `${httpProtocol}://${req.get('host')}` const incomingInput: IncomingInput = req.body || {} // Ensure incomingInput is never undefined @@ -849,6 +903,20 @@ export const utilBuildChatflow = async (req: Request, isInternal: boolean = fals const files = (req.files as Express.Multer.File[]) || [] const abortControllerId = `${chatflow.id}_${chatId}` const isTool = req.get('flowise-tool') === 'true' + const isEvaluation: boolean = req.headers['X-Flowise-Evaluation'] || req.body.evaluation + let evaluationRunId = '' + if (isEvaluation) { + evaluationRunId = req.body.evaluationRunId + if (evaluationRunId) { + const newEval = { + evaluation: { + status: true, + evaluationRunId + } + } + chatflow.analytic = JSON.stringify(newEval) + } + } try { // Validate API Key if its external API request @@ -859,6 +927,28 @@ export const utilBuildChatflow = async (req: Request, isInternal: boolean = fals } } + // This can be public API, so we can only get orgId from the chatflow + const chatflowWorkspaceId = chatflow.workspaceId + const workspace = await appServer.AppDataSource.getRepository(Workspace).findOneBy({ + id: chatflowWorkspaceId + }) + if (!workspace) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Workspace ${chatflowWorkspaceId} not found`) + } + const workspaceId = workspace.id + + const org = await appServer.AppDataSource.getRepository(Organization).findOneBy({ + id: workspace.organizationId + }) + if (!org) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Organization ${workspace.organizationId} not found`) + } + + const orgId = org.id + const subscriptionId = org.subscriptionId as string + + await checkPredictions(orgId, subscriptionId, appServer.usageCacheManager) + const executeData: IExecuteFlowParams = { incomingInput, // Use the defensively created incomingInput variable chatflow, @@ -866,18 +956,24 @@ export const utilBuildChatflow = async (req: Request, isInternal: boolean = fals baseURL, isInternal, files, + isEvaluation, + evaluationRunId, appDataSource: appServer.AppDataSource, sseStreamer: appServer.sseStreamer, telemetry: appServer.telemetry, cachePool: appServer.cachePool, componentNodes: appServer.nodesPool.componentNodes, - isTool // used to disable streaming if incoming request its from ChatflowTool + isTool, // used to disable 
streaming if incoming request its from ChatflowTool + usageCacheManager: appServer.usageCacheManager, + orgId, + workspaceId, + subscriptionId } if (process.env.MODE === MODE.QUEUE) { const predictionQueue = appServer.queueManager.getQueue('prediction') const job = await predictionQueue.addJob(omit(executeData, OMIT_QUEUE_JOB_DATA)) - logger.debug(`[server]: Job added to queue: ${job.id}`) + logger.debug(`[server]: [${orgId}]: Job added to queue: ${job.id}`) const queueEvents = predictionQueue.getQueueEvents() const result = await job.waitUntilFinished(queueEvents) @@ -885,7 +981,7 @@ export const utilBuildChatflow = async (req: Request, isInternal: boolean = fals if (!result) { throw new Error('Job execution failed') } - + await updatePredictionsUsage(orgId, subscriptionId, workspaceId, appServer.usageCacheManager) incrementSuccessMetricCounter(appServer.metricsProvider, isInternal, isAgentFlow) return result } else { @@ -893,9 +989,11 @@ export const utilBuildChatflow = async (req: Request, isInternal: boolean = fals const signal = new AbortController() appServer.abortControllerPool.add(abortControllerId, signal) executeData.signal = signal + const result = await executeFlow(executeData) appServer.abortControllerPool.remove(abortControllerId) + await updatePredictionsUsage(orgId, subscriptionId, workspaceId, appServer.usageCacheManager) incrementSuccessMetricCounter(appServer.metricsProvider, isInternal, isAgentFlow) return result } diff --git a/packages/server/src/utils/constants.ts b/packages/server/src/utils/constants.ts index 247446057ee..3d3d6796697 100644 --- a/packages/server/src/utils/constants.ts +++ b/packages/server/src/utils/constants.ts @@ -1,3 +1,8 @@ +import Auth0SSO from '../enterprise/sso/Auth0SSO' +import AzureSSO from '../enterprise/sso/AzureSSO' +import GithubSSO from '../enterprise/sso/GithubSSO' +import GoogleSSO from '../enterprise/sso/GoogleSSO' + export const WHITELIST_URLS = [ '/api/v1/verify/apikey/', '/api/v1/chatflows/apikey/', @@ -19,10 +24,63 @@ export const WHITELIST_URLS = [ '/api/v1/version', '/api/v1/attachments', '/api/v1/metrics', - '/api/v1/nvidia-nim' + '/api/v1/nvidia-nim', + '/api/v1/auth/resolve', + '/api/v1/auth/login', + '/api/v1/auth/refreshToken', + '/api/v1/settings', + '/api/v1/account/logout', + '/api/v1/account/verify', + '/api/v1/account/register', + '/api/v1/account/resend-verification', + '/api/v1/account/forgot-password', + '/api/v1/account/reset-password', + '/api/v1/account/basic-auth', + '/api/v1/loginmethod', + '/api/v1/pricing', + '/api/v1/user/test', + AzureSSO.LOGIN_URI, + AzureSSO.LOGOUT_URI, + AzureSSO.CALLBACK_URI, + GoogleSSO.LOGIN_URI, + GoogleSSO.LOGOUT_URI, + GoogleSSO.CALLBACK_URI, + Auth0SSO.LOGIN_URI, + Auth0SSO.LOGOUT_URI, + Auth0SSO.CALLBACK_URI, + GithubSSO.LOGIN_URI, + GithubSSO.LOGOUT_URI, + GithubSSO.CALLBACK_URI ] -export const OMIT_QUEUE_JOB_DATA = ['componentNodes', 'appDataSource', 'sseStreamer', 'telemetry', 'cachePool'] +export const enum GeneralErrorMessage { + UNAUTHORIZED = 'Unauthorized', + UNHANDLED_EDGE_CASE = 'Unhandled Edge Case', + INVALID_PASSWORD = 'Invalid Password', + NOT_ALLOWED_TO_DELETE_OWNER = 'Not Allowed To Delete Owner', + INTERNAL_SERVER_ERROR = 'Internal Server Error' +} + +export const enum GeneralSuccessMessage { + CREATED = 'Resource Created Successful', + UPDATED = 'Resource Updated Successful', + DELETED = 'Resource Deleted Successful', + FETCHED = 'Resource Fetched Successful', + LOGGED_IN = 'Login Successful', + LOGGED_OUT = 'Logout Successful' +} + +export const 
DOCUMENT_STORE_BASE_FOLDER = 'docustore' + +export const OMIT_QUEUE_JOB_DATA = [ + 'componentNodes', + 'appDataSource', + 'sseStreamer', + 'telemetry', + 'cachePool', + 'usageCacheManager', + 'abortControllerPool' +] export const INPUT_PARAMS_TYPE = [ 'asyncOptions', @@ -42,3 +100,13 @@ export const INPUT_PARAMS_TYPE = [ 'folder', 'tabs' ] + +export const LICENSE_QUOTAS = { + // Renew per month + PREDICTIONS_LIMIT: 'quota:predictions', + // Static + FLOWS_LIMIT: 'quota:flows', + USERS_LIMIT: 'quota:users', + STORAGE_LIMIT: 'quota:storage', + ADDITIONAL_SEATS_LIMIT: 'quota:additionalSeats' +} as const diff --git a/packages/server/src/utils/createAttachment.ts b/packages/server/src/utils/createAttachment.ts index 8c9bb9958cd..79a4e50af7e 100644 --- a/packages/server/src/utils/createAttachment.ts +++ b/packages/server/src/utils/createAttachment.ts @@ -12,9 +12,12 @@ import { } from 'flowise-components' import { getRunningExpressApp } from './getRunningExpressApp' import { getErrorMessage } from '../errors/utils' +import { checkStorage, updateStorageUsage } from './quotaUsage' +import { ChatFlow } from '../database/entities/ChatFlow' +import { Workspace } from '../enterprise/database/entities/workspace.entity' +import { Organization } from '../enterprise/database/entities/organization.entity' import { InternalFlowiseError } from '../errors/internalFlowiseError' import { StatusCodes } from 'http-status-codes' -import { ChatFlow } from '../database/entities/ChatFlow' /** * Create attachment @@ -46,6 +49,32 @@ export const createFileAttachment = async (req: Request) => { throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Chatflow ${chatflowid} not found`) } + let orgId = req.user?.activeOrganizationId || '' + let workspaceId = req.user?.activeWorkspaceId || '' + let subscriptionId = req.user?.activeOrganizationSubscriptionId || '' + + // This is one of the WHITELIST_URLS, API can be public and there might be no req.user + if (!orgId || !workspaceId) { + const chatflowWorkspaceId = chatflow.workspaceId + const workspace = await appServer.AppDataSource.getRepository(Workspace).findOneBy({ + id: chatflowWorkspaceId + }) + if (!workspace) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Workspace ${chatflowWorkspaceId} not found`) + } + workspaceId = workspace.id + + const org = await appServer.AppDataSource.getRepository(Organization).findOneBy({ + id: workspace.organizationId + }) + if (!org) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Organization ${workspace.organizationId} not found`) + } + + orgId = org.id + subscriptionId = org.subscriptionId as string + } + // Parse chatbot configuration to get file upload settings let pdfConfig = { usage: 'perPage', @@ -75,6 +104,7 @@ export const createFileAttachment = async (req: Request) => { const fileLoaderNodeInstance = new fileLoaderNodeModule.nodeClass() const options = { retrieveAttachmentChatId: true, + orgId, chatflowid, chatId } @@ -83,13 +113,22 @@ export const createFileAttachment = async (req: Request) => { if (files.length) { const isBase64 = req.body.base64 for (const file of files) { + await checkStorage(orgId, subscriptionId, appServer.usageCacheManager) + const fileBuffer = await getFileFromUpload(file.path ?? 
file.key) const fileNames: string[] = [] - // Address file name with special characters: https://github.com/expressjs/multer/issues/1104 file.originalname = Buffer.from(file.originalname, 'latin1').toString('utf8') - - const storagePath = await addArrayFilesToStorage(file.mimetype, fileBuffer, file.originalname, fileNames, chatflowid, chatId) + const { path: storagePath, totalSize } = await addArrayFilesToStorage( + file.mimetype, + fileBuffer, + file.originalname, + fileNames, + orgId, + chatflowid, + chatId + ) + await updateStorageUsage(orgId, workspaceId, totalSize, appServer.usageCacheManager) const fileInputFieldFromMimeType = mapMimeTypeToInputField(file.mimetype) diff --git a/packages/server/src/utils/executeCustomNodeFunction.ts b/packages/server/src/utils/executeCustomNodeFunction.ts new file mode 100644 index 00000000000..a22a0291acb --- /dev/null +++ b/packages/server/src/utils/executeCustomNodeFunction.ts @@ -0,0 +1,59 @@ +import { handleEscapeCharacters, ICommonObject } from 'flowise-components' +import { databaseEntities } from '.' +import { InternalFlowiseError } from '../errors/internalFlowiseError' +import { StatusCodes } from 'http-status-codes' +import { getErrorMessage } from '../errors/utils' +import { DataSource } from 'typeorm' +import { IComponentNodes } from '../Interface' + +export const executeCustomNodeFunction = async ({ + appDataSource, + componentNodes, + data +}: { + appDataSource: DataSource + componentNodes: IComponentNodes + data: any +}) => { + try { + const body = data + const jsFunction = typeof body?.javascriptFunction === 'string' ? body.javascriptFunction : '' + const matches = jsFunction.matchAll(/\$([a-zA-Z0-9_]+)/g) + const matchesArray: RegExpMatchArray[] = Array.from(matches) + const functionInputVariables = Object.fromEntries(matchesArray.map((g) => [g[1], undefined])) + if (functionInputVariables && Object.keys(functionInputVariables).length) { + for (const key in functionInputVariables) { + if (key.includes('vars')) { + delete functionInputVariables[key] + } + } + } + const nodeData = { inputs: { functionInputVariables, ...body } } + if (Object.prototype.hasOwnProperty.call(componentNodes, 'customFunction')) { + try { + const nodeInstanceFilePath = componentNodes['customFunction'].filePath as string + const nodeModule = await import(nodeInstanceFilePath) + const newNodeInstance = new nodeModule.nodeClass() + + const options: ICommonObject = { + appDataSource, + databaseEntities + } + + const returnData = await newNodeInstance.init(nodeData, '', options) + const dbResponse = typeof returnData === 'string' ? 
handleEscapeCharacters(returnData, true) : returnData + + return dbResponse + } catch (error) { + throw new InternalFlowiseError(StatusCodes.INTERNAL_SERVER_ERROR, `Error running custom function: ${error}`) + } + } else { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Node customFunction not found`) + } + } catch (error) { + throw new InternalFlowiseError( + StatusCodes.INTERNAL_SERVER_ERROR, + `Error: nodesService.executeCustomFunction - ${getErrorMessage(error)}` + ) + } +} diff --git a/packages/server/src/utils/fileRepository.ts b/packages/server/src/utils/fileRepository.ts index 1147aeb98d1..2e017c9fdb9 100644 --- a/packages/server/src/utils/fileRepository.ts +++ b/packages/server/src/utils/fileRepository.ts @@ -1,6 +1,8 @@ import { ChatFlow } from '../database/entities/ChatFlow' import { IReactFlowObject } from '../Interface' import { addBase64FilesToStorage } from 'flowise-components' +import { checkStorage, updateStorageUsage } from './quotaUsage' +import { UsageCacheManager } from '../UsageCacheManager' export const containsBase64File = (chatflow: ChatFlow) => { const parsedFlowData: IReactFlowObject = JSON.parse(chatflow.flowData) @@ -46,11 +48,19 @@ export const containsBase64File = (chatflow: ChatFlow) => { return found } -export const updateFlowDataWithFilePaths = async (chatflowid: string, flowData: string) => { +export const updateFlowDataWithFilePaths = async ( + chatflowid: string, + flowData: string, + orgId: string, + workspaceId: string, + subscriptionId: string, + usageCacheManager: UsageCacheManager +) => { try { const parsedFlowData: IReactFlowObject = JSON.parse(flowData) const re = new RegExp('^data.*;base64', 'i') const nodes = parsedFlowData.nodes + for (let j = 0; j < nodes.length; j++) { const node = nodes[j] if (node.data.category !== 'Document Loaders') { @@ -75,21 +85,26 @@ export const updateFlowDataWithFilePaths = async (chatflowid: string, flowData: for (let j = 0; j < files.length; j++) { const file = files[j] if (re.test(file)) { - node.data.inputs[key] = await addBase64FilesToStorage(file, chatflowid, fileNames) + await checkStorage(orgId, subscriptionId, usageCacheManager) + const { path, totalSize } = await addBase64FilesToStorage(file, chatflowid, fileNames, orgId) + node.data.inputs[key] = path + await updateStorageUsage(orgId, workspaceId, totalSize, usageCacheManager) } } } catch (e) { continue } } else if (re.test(input)) { - node.data.inputs[key] = await addBase64FilesToStorage(input, chatflowid, fileNames) + await checkStorage(orgId, subscriptionId, usageCacheManager) + const { path, totalSize } = await addBase64FilesToStorage(input, chatflowid, fileNames, orgId) + node.data.inputs[key] = path + await updateStorageUsage(orgId, workspaceId, totalSize, usageCacheManager) } } } } - return JSON.stringify(parsedFlowData) - } catch (e) { - return '' + } catch (e: any) { + throw new Error(`Error updating flow data with file paths: ${e.message}`) } } diff --git a/packages/server/src/utils/getChatMessage.ts b/packages/server/src/utils/getChatMessage.ts index 9d8726f0e5e..171bba14c07 100644 --- a/packages/server/src/utils/getChatMessage.ts +++ b/packages/server/src/utils/getChatMessage.ts @@ -2,6 +2,7 @@ import { MoreThanOrEqual, LessThanOrEqual, Between, In } from 'typeorm' import { ChatMessageRatingType, ChatType } from '../Interface' import { ChatMessage } from '../database/entities/ChatMessage' import { ChatMessageFeedback } from '../database/entities/ChatMessageFeedback' +import { ChatFlow } from '../database/entities/ChatFlow' import { 
getRunningExpressApp } from '../utils/getRunningExpressApp' import { aMonthAgo } from '.' @@ -30,6 +31,7 @@ interface GetChatMessageParams { messageId?: string feedback?: boolean feedbackTypes?: ChatMessageRatingType[] + activeWorkspaceId?: string } export const utilGetChatMessage = async ({ @@ -43,10 +45,21 @@ export const utilGetChatMessage = async ({ endDate, messageId, feedback, - feedbackTypes + feedbackTypes, + activeWorkspaceId }: GetChatMessageParams): Promise => { const appServer = getRunningExpressApp() + // Check if chatflow workspaceId is same as activeWorkspaceId + if (activeWorkspaceId) { + const chatflow = await appServer.AppDataSource.getRepository(ChatFlow).findOneBy({ + id: chatflowid + }) + if (chatflow?.workspaceId !== activeWorkspaceId) { + throw new Error('Unauthorized access') + } + } + if (feedback) { const query = await appServer.AppDataSource.getRepository(ChatMessage).createQueryBuilder('chat_message') @@ -102,6 +115,7 @@ export const utilGetChatMessage = async ({ } let createdDateQuery + if (startDate || endDate) { if (startDate && endDate) { createdDateQuery = Between(new Date(startDate), new Date(endDate)) @@ -112,7 +126,7 @@ export const utilGetChatMessage = async ({ } } - return await appServer.AppDataSource.getRepository(ChatMessage).find({ + const messages = await appServer.AppDataSource.getRepository(ChatMessage).find({ where: { chatflowid, chatType: chatTypes?.length ? In(chatTypes) : undefined, @@ -129,4 +143,6 @@ export const utilGetChatMessage = async ({ createdDate: sortOrder === 'DESC' ? 'DESC' : 'ASC' } }) + + return messages } diff --git a/packages/server/src/utils/index.ts b/packages/server/src/utils/index.ts index 48303a91196..11003095252 100644 --- a/packages/server/src/utils/index.ts +++ b/packages/server/src/utils/index.ts @@ -5,6 +5,7 @@ import path from 'path' import fs from 'fs' import logger from './logger' +import { v4 as uuidv4 } from 'uuid' import { IChatFlow, IComponentCredentials, @@ -63,6 +64,8 @@ import { SecretsManagerClient, SecretsManagerClientConfig } from '@aws-sdk/client-secrets-manager' +import { checkStorage, updateStorageUsage } from './quotaUsage' +import { UsageCacheManager } from '../UsageCacheManager' export const QUESTION_VAR_PREFIX = 'question' export const FILE_ATTACHMENT_PREFIX = 'file_attachment' @@ -203,6 +206,22 @@ export const constructGraphs = ( return { graph, nodeDependencies } } +/** + * Get starting node and check if flow is valid + * @param {INodeDependencies} nodeDependencies + */ +export const getStartingNode = (nodeDependencies: INodeDependencies) => { + // Find starting node + const startingNodeIds = [] as string[] + Object.keys(nodeDependencies).forEach((nodeId) => { + if (nodeDependencies[nodeId] === 0) { + startingNodeIds.push(nodeId) + } + }) + + return { startingNodeIds } +} + /** * Get starting nodes and check if flow is valid * @param {INodeDependencies} graph @@ -239,22 +258,6 @@ export const getStartingNodes = (graph: INodeDirectedGraph, endNodeId: string) = return { startingNodeIds, depthQueue: depthQueueReversed } } -/** - * Get starting node and check if flow is valid - * @param {INodeDependencies} nodeDependencies - */ -export const getStartingNode = (nodeDependencies: INodeDependencies) => { - // Find starting node - const startingNodeIds = [] as string[] - Object.keys(nodeDependencies).forEach((nodeId) => { - if (nodeDependencies[nodeId] === 0) { - startingNodeIds.push(nodeId) - } - }) - - return { startingNodeIds } -} - /** * Get all connected nodes from startnode * @param 
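For clarity, getStartingNode (as defined above) just returns every node whose dependency count is zero. A tiny illustration with made-up node ids:

// Hypothetical dependency map: llmChain_0 depends on two other nodes,
// so only the two independent nodes come back as starting points.
const nodeDependencies = { chatOpenAI_0: 0, promptTemplate_0: 0, llmChain_0: 2 }
const { startingNodeIds } = getStartingNode(nodeDependencies)
// startingNodeIds -> ['chatOpenAI_0', 'promptTemplate_0']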
{INodeDependencies} graph @@ -497,6 +500,10 @@ type BuildFlowParams = { stopNodeId?: string uploads?: IFileUpload[] baseURL?: string + orgId?: string + workspaceId?: string + subscriptionId?: string + usageCacheManager?: UsageCacheManager uploadedFilesContent?: string } @@ -528,7 +535,11 @@ export const buildFlow = async ({ isUpsert, stopNodeId, uploads, - baseURL + baseURL, + orgId, + workspaceId, + subscriptionId, + usageCacheManager }: BuildFlowParams) => { const flowNodes = cloneDeep(reactFlowNodes) @@ -591,8 +602,11 @@ export const buildFlow = async ({ ) if (isUpsert && stopNodeId && nodeId === stopNodeId) { - logger.debug(`[server]: Upserting ${reactFlowNode.data.label} (${reactFlowNode.data.id})`) + logger.debug(`[server]: [${orgId}]: Upserting ${reactFlowNode.data.label} (${reactFlowNode.data.id})`) const indexResult = await newNodeInstance.vectorStoreMethods!['upsert']!.call(newNodeInstance, reactFlowNodeData, { + orgId, + workspaceId, + subscriptionId, chatId, sessionId, chatflowid, @@ -602,12 +616,13 @@ export const buildFlow = async ({ appDataSource, databaseEntities, cachePool, + usageCacheManager, dynamicVariables, uploads, baseURL }) if (indexResult) upsertHistory['result'] = indexResult - logger.debug(`[server]: Finished upserting ${reactFlowNode.data.label} (${reactFlowNode.data.id})`) + logger.debug(`[server]: [${orgId}]: Finished upserting ${reactFlowNode.data.label} (${reactFlowNode.data.id})`) break } else if ( !isUpsert && @@ -616,9 +631,12 @@ export const buildFlow = async ({ ) { initializedNodes.add(nodeId) } else { - logger.debug(`[server]: Initializing ${reactFlowNode.data.label} (${reactFlowNode.data.id})`) + logger.debug(`[server]: [${orgId}]: Initializing ${reactFlowNode.data.label} (${reactFlowNode.data.id})`) const finalQuestion = uploadedFilesContent ? 
`${uploadedFilesContent}\n\n${question}` : question let outputResult = await newNodeInstance.init(reactFlowNodeData, finalQuestion, { + orgId, + workspaceId, + subscriptionId, chatId, sessionId, chatflowid, @@ -627,11 +645,14 @@ export const buildFlow = async ({ appDataSource, databaseEntities, cachePool, + usageCacheManager, isUpsert, dynamicVariables, uploads, baseURL, - componentNodes: componentNodes as ICommonObject + componentNodes, + updateStorageUsage, + checkStorage }) // Save dynamic variables @@ -676,11 +697,11 @@ export const buildFlow = async ({ flowNodes[nodeIndex].data.instance = outputResult - logger.debug(`[server]: Finished initializing ${reactFlowNode.data.label} (${reactFlowNode.data.id})`) + logger.debug(`[server]: [${orgId}]: Finished initializing ${reactFlowNode.data.label} (${reactFlowNode.data.id})`) initializedNodes.add(reactFlowNode.data.id) } } catch (e: any) { - logger.error(e) + logger.error(`[server]: [${orgId}]:`, e) throw new Error(e) } @@ -744,6 +765,7 @@ export const clearSessionMemory = async ( componentNodes: IComponentNodes, chatId: string, appDataSource: DataSource, + orgId?: string, sessionId?: string, memoryType?: string, isClearFromViewMessageDialog?: string @@ -757,7 +779,7 @@ export const clearSessionMemory = async ( const nodeInstanceFilePath = componentNodes[node.data.name].filePath as string const nodeModule = await import(nodeInstanceFilePath) const newNodeInstance = new nodeModule.nodeClass() - const options: ICommonObject = { chatId, appDataSource, databaseEntities, logger } + const options: ICommonObject = { orgId, chatId, appDataSource, databaseEntities, logger } // SessionId always take priority first because it is the sessionId used for 3rd party memory node if (sessionId && node.data.inputs) { @@ -1261,7 +1283,6 @@ export const findAvailableConfigs = (reactFlowNodes: IReactFlowNode[], component for (const flowNode of reactFlowNodes) { for (const inputParam of flowNode.data.inputParams) { let obj: IOverrideConfig | undefined - if (inputParam.type === 'file') { obj = { node: flowNode.data.label, @@ -1500,7 +1521,6 @@ export const decryptCredentialData = async ( if (USE_AWS_SECRETS_MANAGER && secretsManagerClient) { try { - logger.info(`[server]: Reading AWS Secret: ${encryptedData}`) if (encryptedData.startsWith('FlowiseCredential_')) { const command = new GetSecretValueCommand({ SecretId: encryptedData }) const response = await secretsManagerClient.send(command) @@ -1567,6 +1587,10 @@ export const transformToCredentialEntity = async (body: ICredentialReqBody): Pro const newCredential = new Credential() Object.assign(newCredential, credentialBody) + if (body.workspaceId) { + newCredential.workspaceId = body.workspaceId + } + return newCredential } @@ -1734,21 +1758,6 @@ export const getTelemetryFlowObj = (nodes: IReactFlowNode[], edges: IReactFlowEd return { nodes: nodeData, edges: edgeData } } -/** - * Get user settings file - * TODO: move env variables to settings json file, easier configuration - */ -export const getUserSettingsFilePath = () => { - if (process.env.SECRETKEY_PATH) return path.join(process.env.SECRETKEY_PATH, 'settings.json') - const checkPaths = [path.join(getUserHome(), '.flowise', 'settings.json')] - for (const checkPath of checkPaths) { - if (fs.existsSync(checkPath)) { - return checkPath - } - } - return '' -} - /** * Get app current version */ @@ -1815,14 +1824,8 @@ export const getUploadPath = (): string => { : path.join(getUserHome(), '.flowise', 'uploads') } -const getOrgId = () => { - const settingsContent = 
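The options bag passed into each node's init now carries checkStorage and updateStorageUsage alongside orgId, workspaceId, subscriptionId and usageCacheManager. A component node that writes files could, in principle, consume them as in the sketch below; the node-side helper is hypothetical and not part of this diff:

import { ICommonObject } from 'flowise-components'

// Stub so the sketch compiles on its own; a real node would write to its storage backend here
// and return the organization's new total size.
const persistBuffer = async (_buf: Buffer, _orgId: string): Promise<number> => 0

// Hypothetical consumer inside a component node's init(): gate on the org's quota,
// write the file, then report the new total back through the injected callbacks.
const writeWithQuota = async (fileBuffer: Buffer, options: ICommonObject) => {
    const { orgId, workspaceId, subscriptionId, usageCacheManager, checkStorage, updateStorageUsage } = options
    await checkStorage(orgId, subscriptionId, usageCacheManager)
    const totalSize = await persistBuffer(fileBuffer, orgId)
    await updateStorageUsage(orgId, workspaceId, totalSize, usageCacheManager)
}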
fs.readFileSync(getUserSettingsFilePath(), 'utf8') - try { - const settings = JSON.parse(settingsContent) - return settings.instanceId - } catch (error) { - return '' - } +export function generateId() { + return uuidv4() } export const getMulterStorage = () => { @@ -1837,10 +1840,10 @@ export const getMulterStorage = () => { s3: s3Client, bucket: Bucket, metadata: function (req, file, cb) { - cb(null, { fieldName: file.fieldname, originalName: file.originalname, orgId: getOrgId() }) + cb(null, { fieldName: file.fieldname, originalName: file.originalname }) }, key: function (req, file, cb) { - cb(null, `${getOrgId()}/${Date.now().toString()}`) + cb(null, `${generateId()}`) } }) }) @@ -1852,7 +1855,7 @@ export const getMulterStorage = () => { bucket: process.env.GOOGLE_CLOUD_STORAGE_BUCKET_NAME, keyFilename: process.env.GOOGLE_CLOUD_STORAGE_CREDENTIAL, uniformBucketLevelAccess: Boolean(process.env.GOOGLE_CLOUD_UNIFORM_BUCKET_ACCESS) ?? true, - destination: `uploads/${getOrgId()}` + destination: `uploads/${generateId()}` }) }) } else { diff --git a/packages/server/src/utils/logger.ts b/packages/server/src/utils/logger.ts index 7ad5b58a461..64d016aaebc 100644 --- a/packages/server/src/utils/logger.ts +++ b/packages/server/src/utils/logger.ts @@ -4,6 +4,7 @@ import { hostname } from 'node:os' import config from './config' // should be replaced by node-config or similar import { createLogger, transports, format } from 'winston' import { NextFunction, Request, Response } from 'express' +import DailyRotateFile from 'winston-daily-rotate-file' import { S3ClientConfig } from '@aws-sdk/client-s3' import { LoggingWinston } from '@google-cloud/logging-winston' @@ -114,13 +115,11 @@ const logger = createLogger({ new transports.Console(), ...(!process.env.STORAGE_TYPE || process.env.STORAGE_TYPE === 'local' ? [ - new transports.File({ - filename: path.join(logDir, config.logging.server.filename ?? 'server.log'), + new DailyRotateFile({ + filename: path.join(logDir, config.logging.server.filename ?? 'server-%DATE%.log'), + datePattern: 'YYYY-MM-DD-HH', + maxSize: '20m', level: config.logging.server.level ?? 'info' - }), - new transports.File({ - filename: path.join(logDir, config.logging.server.errorFilename ?? 'server-error.log'), - level: 'error' // Log only errors to this file }) ] : []), @@ -134,13 +133,7 @@ const logger = createLogger({ ...(process.env.STORAGE_TYPE === 'gcs' ? [gcsServerStream] : []) ], exceptionHandlers: [ - ...(!process.env.STORAGE_TYPE || process.env.STORAGE_TYPE === 'local' - ? [ - new transports.File({ - filename: path.join(logDir, config.logging.server.errorFilename ?? 'server-error.log') - }) - ] - : []), + ...(process.env.DEBUG && process.env.DEBUG === 'true' ? [new transports.Console()] : []), ...(process.env.STORAGE_TYPE === 's3' ? [ new transports.Stream({ @@ -151,13 +144,7 @@ const logger = createLogger({ ...(process.env.STORAGE_TYPE === 'gcs' ? [gcsErrorStream] : []) ], rejectionHandlers: [ - ...(!process.env.STORAGE_TYPE || process.env.STORAGE_TYPE === 'local' - ? [ - new transports.File({ - filename: path.join(logDir, config.logging.server.errorFilename ?? 'server-error.log') - }) - ] - : []), + ...(process.env.DEBUG && process.env.DEBUG === 'true' ? [new transports.Console()] : []), ...(process.env.STORAGE_TYPE === 's3' ? 
[ new transports.Stream({ @@ -171,7 +158,14 @@ const logger = createLogger({ export function expressRequestLogger(req: Request, res: Response, next: NextFunction): void { const unwantedLogURLs = ['/api/v1/node-icon/', '/api/v1/components-credentials-icon/', '/api/v1/ping'] + if (/\/api\/v1\//i.test(req.url) && !unwantedLogURLs.some((url) => new RegExp(url, 'i').test(req.url))) { + // Create a sanitized copy of the request body + const sanitizedBody = { ...req.body } + if (sanitizedBody.password) { + sanitizedBody.password = '********' + } + const fileLogger = createLogger({ format: combine(timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }), format.json(), errors({ stack: true })), defaultMeta: { @@ -179,13 +173,14 @@ export function expressRequestLogger(req: Request, res: Response, next: NextFunc request: { method: req.method, url: req.url, - body: req.body, + body: sanitizedBody, // Use sanitized body instead of raw body query: req.query, params: req.params, headers: req.headers } }, transports: [ + ...(process.env.DEBUG && process.env.DEBUG === 'true' ? [new transports.Console()] : []), ...(!process.env.STORAGE_TYPE || process.env.STORAGE_TYPE === 'local' ? [ new transports.File({ diff --git a/packages/server/src/utils/quotaUsage.ts b/packages/server/src/utils/quotaUsage.ts new file mode 100644 index 00000000000..e2cf382d4dc --- /dev/null +++ b/packages/server/src/utils/quotaUsage.ts @@ -0,0 +1,171 @@ +import { StatusCodes } from 'http-status-codes' +import { InternalFlowiseError } from '../errors/internalFlowiseError' +import { UsageCacheManager } from '../UsageCacheManager' +import { LICENSE_QUOTAS } from './constants' +import logger from './logger' + +type UsageType = 'flows' | 'users' +export const ENTERPRISE_FEATURE_FLAGS = [ + //'feat:account', // Only for Cloud + 'feat:datasets', + 'feat:evaluations', + 'feat:evaluators', + 'feat:files', + 'feat:login-activity', + 'feat:users', + 'feat:workspaces', + 'feat:logs', + 'feat:roles', + 'feat:sso-config' +] + +export const getCurrentUsage = async (orgId: string, subscriptionId: string, usageCacheManager: UsageCacheManager) => { + try { + if (!usageCacheManager || !subscriptionId || !orgId) return + + const currentStorageUsage = (await usageCacheManager.get(`storage:${orgId}`)) || 0 + const currentPredictionsUsage = (await usageCacheManager.get(`predictions:${orgId}`)) || 0 + + const quotas = await usageCacheManager.getQuotas(subscriptionId) + const storageLimit = quotas[LICENSE_QUOTAS.STORAGE_LIMIT] + const predLimit = quotas[LICENSE_QUOTAS.PREDICTIONS_LIMIT] + + return { + predictions: { + usage: currentPredictionsUsage, + limit: predLimit + }, + storage: { + usage: currentStorageUsage, + limit: storageLimit + } + } + } catch (error) { + logger.error(`[getCurrentUsage] Error getting usage: ${error}`) + throw error + } +} + +// For usage that doesn't renew per month, we just get the count from database and check +export const checkUsageLimit = async ( + type: UsageType, + subscriptionId: string, + usageCacheManager: UsageCacheManager, + currentUsage: number +) => { + if (!usageCacheManager || !subscriptionId) return + + const quotas = await usageCacheManager.getQuotas(subscriptionId) + + let limit = -1 + switch (type) { + case 'flows': + limit = quotas[LICENSE_QUOTAS.FLOWS_LIMIT] + break + case 'users': + limit = quotas[LICENSE_QUOTAS.USERS_LIMIT] + (Math.max(quotas[LICENSE_QUOTAS.ADDITIONAL_SEATS_LIMIT], 0) || 0) + break + } + + if (limit === -1) return + + if (currentUsage > limit) { + throw new 
InternalFlowiseError(StatusCodes.TOO_MANY_REQUESTS, `Limit exceeded: ${type}`) + } +} + +// As predictions limit renew per month, we set to cache with 1 month TTL +export const updatePredictionsUsage = async ( + orgId: string, + subscriptionId: string, + _: string = '', + usageCacheManager?: UsageCacheManager +) => { + if (!usageCacheManager) return + + const quotas = await usageCacheManager.getQuotas(subscriptionId) + const predictionsLimit = quotas[LICENSE_QUOTAS.PREDICTIONS_LIMIT] + + let currentPredictions = 0 + const existingPredictions = await usageCacheManager.get(`predictions:${orgId}`) + if (existingPredictions) { + currentPredictions = 1 + (existingPredictions as number) > predictionsLimit ? predictionsLimit : 1 + (existingPredictions as number) + } else { + currentPredictions = 1 + } + + const currentTTL = await usageCacheManager.getTTL(`predictions:${orgId}`) + if (currentTTL) { + const currentTimestamp = Date.now() + const timeLeft = currentTTL - currentTimestamp + usageCacheManager.set(`predictions:${orgId}`, currentPredictions, timeLeft) + } else { + const subscriptionDetails = await usageCacheManager.getSubscriptionDetails(subscriptionId) + if (subscriptionDetails && subscriptionDetails.created) { + const MS_PER_DAY = 24 * 60 * 60 * 1000 + const DAYS = 30 + const approximateMonthMs = DAYS * MS_PER_DAY + + // Calculate time elapsed since subscription creation + const createdTimestamp = subscriptionDetails.created * 1000 // Convert to milliseconds if timestamp is in seconds + const currentTimestamp = Date.now() + const timeElapsed = currentTimestamp - createdTimestamp + + // Calculate remaining time in the current month period + const timeLeft = approximateMonthMs - (timeElapsed % approximateMonthMs) + + usageCacheManager.set(`predictions:${orgId}`, currentPredictions, timeLeft) + } else { + // Fallback to default 30 days if no creation date + const MS_PER_DAY = 24 * 60 * 60 * 1000 + const DAYS = 30 + const approximateMonthMs = DAYS * MS_PER_DAY + usageCacheManager.set(`predictions:${orgId}`, currentPredictions, approximateMonthMs) + } + } +} + +export const checkPredictions = async (orgId: string, subscriptionId: string, usageCacheManager: UsageCacheManager) => { + if (!usageCacheManager || !subscriptionId) return + + const currentPredictions: number = (await usageCacheManager.get(`predictions:${orgId}`)) || 0 + + const quotas = await usageCacheManager.getQuotas(subscriptionId) + const predictionsLimit = quotas[LICENSE_QUOTAS.PREDICTIONS_LIMIT] + if (predictionsLimit === -1) return + + if (currentPredictions >= predictionsLimit) { + throw new InternalFlowiseError(StatusCodes.TOO_MANY_REQUESTS, 'Predictions limit exceeded') + } + + return { + usage: currentPredictions, + limit: predictionsLimit + } +} + +// Storage does not renew per month nor do we store the total size in database, so we just store the total size in cache +export const updateStorageUsage = (orgId: string, _: string = '', totalSize: number, usageCacheManager?: UsageCacheManager) => { + if (!usageCacheManager) return + usageCacheManager.set(`storage:${orgId}`, totalSize) +} + +export const checkStorage = async (orgId: string, subscriptionId: string, usageCacheManager: UsageCacheManager) => { + if (!usageCacheManager || !subscriptionId) return + + let currentStorageUsage = 0 + currentStorageUsage = (await usageCacheManager.get(`storage:${orgId}`)) || 0 + + const quotas = await usageCacheManager.getQuotas(subscriptionId) + const storageLimit = quotas[LICENSE_QUOTAS.STORAGE_LIMIT] + if (storageLimit === -1) 
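The 30-day window arithmetic in updatePredictionsUsage above is easiest to follow with concrete numbers (the timestamps below are illustrative): the counter's TTL is aligned to the subscription's own monthly boundary, not to the moment of the first prediction.

// Worked example of the fallback TTL computed in updatePredictionsUsage
const MS_PER_DAY = 24 * 60 * 60 * 1000
const approximateMonthMs = 30 * MS_PER_DAY

// Suppose the subscription was created 45 days ago
const createdTimestamp = Date.now() - 45 * MS_PER_DAY
const timeElapsed = Date.now() - createdTimestamp             // ~45 days
const timeLeft = approximateMonthMs - (timeElapsed % approximateMonthMs)
// 45 % 30 = 15 days into the current period, so timeLeft ≈ 15 days:
// the predictions counter expires when this 30-day period rolls over.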
return + + if (currentStorageUsage >= storageLimit) { + throw new InternalFlowiseError(StatusCodes.TOO_MANY_REQUESTS, 'Storage limit exceeded') + } + + return { + usage: currentStorageUsage, + limit: storageLimit + } +} diff --git a/packages/server/src/utils/telemetry.ts b/packages/server/src/utils/telemetry.ts index cd26c8c933a..99dc023c752 100644 --- a/packages/server/src/utils/telemetry.ts +++ b/packages/server/src/utils/telemetry.ts @@ -1,8 +1,11 @@ import { v4 as uuidv4 } from 'uuid' import { PostHog } from 'posthog-node' -import path from 'path' -import fs from 'fs' -import { getUserHome, getUserSettingsFilePath } from '.' +import { getAppVersion } from '../utils' + +export enum TelemetryEventType { + 'USER_CREATED' = 'user_created', + 'ORGANIZATION_CREATED' = 'organization_created' +} export class Telemetry { postHog?: PostHog @@ -15,27 +18,10 @@ export class Telemetry { } } - async id(): Promise { - try { - const settingsContent = await fs.promises.readFile(getUserSettingsFilePath(), 'utf8') - const settings = JSON.parse(settingsContent) - return settings.instanceId - } catch (error) { - const instanceId = uuidv4() - const settings = { - instanceId - } - const defaultLocation = process.env.SECRETKEY_PATH - ? path.join(process.env.SECRETKEY_PATH, 'settings.json') - : path.join(getUserHome(), '.flowise', 'settings.json') - await fs.promises.writeFile(defaultLocation, JSON.stringify(settings, null, 2)) - return instanceId - } - } - - async sendTelemetry(event: string, properties = {}): Promise { + async sendTelemetry(event: string, properties: Record = {}, orgId = ''): Promise { + properties.version = await getAppVersion() if (this.postHog) { - const distinctId = await this.id() + const distinctId = orgId || uuidv4() this.postHog.capture({ event, distinctId, diff --git a/packages/server/src/utils/upsertVector.ts b/packages/server/src/utils/upsertVector.ts index c60e5f37430..d292ec2dfc6 100644 --- a/packages/server/src/utils/upsertVector.ts +++ b/packages/server/src/utils/upsertVector.ts @@ -28,11 +28,15 @@ import { getRunningExpressApp } from '../utils/getRunningExpressApp' import { UpsertHistory } from '../database/entities/UpsertHistory' import { InternalFlowiseError } from '../errors/internalFlowiseError' import { StatusCodes } from 'http-status-codes' +import { checkStorage, updateStorageUsage } from './quotaUsage' import { getErrorMessage } from '../errors/utils' import { v4 as uuidv4 } from 'uuid' import { FLOWISE_COUNTER_STATUS, FLOWISE_METRIC_COUNTERS } from '../Interface.Metrics' import { Variable } from '../database/entities/Variable' +import { getWorkspaceSearchOptions } from '../enterprise/utils/ControllerServiceUtils' import { OMIT_QUEUE_JOB_DATA } from './constants' +import { Workspace } from '../enterprise/database/entities/workspace.entity' +import { Organization } from '../enterprise/database/entities/organization.entity' export const executeUpsert = async ({ componentNodes, @@ -43,7 +47,11 @@ export const executeUpsert = async ({ telemetry, cachePool, isInternal, - files + files, + orgId, + workspaceId, + subscriptionId, + usageCacheManager }: IExecuteFlowParams) => { const question = incomingInput.question let overrideConfig = incomingInput.overrideConfig ?? {} @@ -56,11 +64,21 @@ export const executeUpsert = async ({ if (files?.length) { overrideConfig = { ...incomingInput } for (const file of files) { + await checkStorage(orgId, subscriptionId, usageCacheManager) + const fileNames: string[] = [] const fileBuffer = await getFileFromUpload(file.path ?? 
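checkUsageLimit above covers the static quotas (flows, and users plus purchased seats) and expects the caller to supply the current count. The real call sites sit outside this diff; a hypothetical guard before creating a chatflow might look like:

import { DataSource } from 'typeorm'
import { ChatFlow } from '../database/entities/ChatFlow'
import { getWorkspaceSearchOptions } from '../enterprise/utils/ControllerServiceUtils'
import { UsageCacheManager } from '../UsageCacheManager'
import { checkUsageLimit } from './quotaUsage'

// Hypothetical guard: count what the workspace already has and let checkUsageLimit
// throw a 429 if one more flow would exceed the subscription's `quota:flows` allowance.
export const assertCanCreateFlow = async (
    appDataSource: DataSource,
    workspaceId: string,
    subscriptionId: string,
    usageCacheManager: UsageCacheManager
) => {
    const existingFlows = await appDataSource.getRepository(ChatFlow).countBy(getWorkspaceSearchOptions(workspaceId))
    await checkUsageLimit('flows', subscriptionId, usageCacheManager, existingFlows + 1)
}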
file.key) // Address file name with special characters: https://github.com/expressjs/multer/issues/1104 file.originalname = Buffer.from(file.originalname, 'latin1').toString('utf8') - const storagePath = await addArrayFilesToStorage(file.mimetype, fileBuffer, file.originalname, fileNames, chatflowid) + const { path: storagePath, totalSize } = await addArrayFilesToStorage( + file.mimetype, + fileBuffer, + file.originalname, + fileNames, + orgId, + chatflowid + ) + await updateStorageUsage(orgId, workspaceId, totalSize, usageCacheManager) const fileInputFieldFromMimeType = mapMimeTypeToInputField(file.mimetype) @@ -147,7 +165,7 @@ export const executeUpsert = async ({ const { startingNodeIds, depthQueue } = getStartingNodes(filteredGraph, stopNodeId) /*** Get API Config ***/ - const availableVariables = await appDataSource.getRepository(Variable).find() + const availableVariables = await appDataSource.getRepository(Variable).findBy(getWorkspaceSearchOptions(chatflow.workspaceId)) const { nodeOverrides, variableOverrides, apiOverrideStatus } = getAPIOverrideConfig(chatflow) const upsertedResult = await buildFlow({ @@ -164,14 +182,18 @@ export const executeUpsert = async ({ sessionId, chatflowid, appDataSource, + usageCacheManager, + cachePool, + isUpsert, + stopNodeId, overrideConfig, apiOverrideStatus, nodeOverrides, availableVariables, variableOverrides, - cachePool, - isUpsert, - stopNodeId + orgId, + workspaceId, + subscriptionId }) // Save to DB @@ -186,13 +208,17 @@ export const executeUpsert = async ({ await appDataSource.getRepository(UpsertHistory).save(upsertHistory) } - await telemetry.sendTelemetry('vector_upserted', { - version: await getAppVersion(), - chatlowId: chatflowid, - type: isInternal ? ChatType.INTERNAL : ChatType.EXTERNAL, - flowGraph: getTelemetryFlowObj(nodes, edges), - stopNodeId - }) + await telemetry.sendTelemetry( + 'vector_upserted', + { + version: await getAppVersion(), + chatlowId: chatflowid, + type: isInternal ? ChatType.INTERNAL : ChatType.EXTERNAL, + flowGraph: getTelemetryFlowObj(nodes, edges), + stopNodeId + }, + orgId + ) return upsertedResult['result'] ?? 
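The same check → store → meter sequence now wraps every file upload in this patch (executeFlow, createFileAttachment and executeUpsert). Condensed into one sketch; the helper itself is illustrative, not code from the PR:

import { addArrayFilesToStorage } from 'flowise-components'
import { UsageCacheManager } from '../UsageCacheManager'
import { checkStorage, updateStorageUsage } from './quotaUsage'

// Illustrative helper mirroring the per-file logic at the three call sites above.
const storeUploadedFile = async (
    file: Express.Multer.File,
    fileBuffer: Buffer,
    orgId: string,
    workspaceId: string,
    subscriptionId: string,
    chatflowid: string,
    usageCacheManager: UsageCacheManager
): Promise<string> => {
    // 1. Refuse the upload if the organization is already at its storage quota
    await checkStorage(orgId, subscriptionId, usageCacheManager)

    // 2. Persist the file under the org's folder; totalSize is the running total reported by flowise-components
    const fileNames: string[] = []
    const { path, totalSize } = await addArrayFilesToStorage(file.mimetype, fileBuffer, file.originalname, fileNames, orgId, chatflowid)

    // 3. Record the new total so the next checkStorage call sees it
    await updateStorageUsage(orgId, workspaceId, totalSize, usageCacheManager)
    return path
}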
{ result: 'Successfully Upserted' } } @@ -204,6 +230,7 @@ export const executeUpsert = async ({ */ export const upsertVector = async (req: Request, isInternal: boolean = false) => { const appServer = getRunningExpressApp() + try { const chatflowid = req.params.id @@ -228,6 +255,26 @@ export const upsertVector = async (req: Request, isInternal: boolean = false) => } } + // This can be public API, so we can only get orgId from the chatflow + const chatflowWorkspaceId = chatflow.workspaceId + const workspace = await appServer.AppDataSource.getRepository(Workspace).findOneBy({ + id: chatflowWorkspaceId + }) + if (!workspace) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Workspace ${chatflowWorkspaceId} not found`) + } + const workspaceId = workspace.id + + const org = await appServer.AppDataSource.getRepository(Organization).findOneBy({ + id: workspace.organizationId + }) + if (!org) { + throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Organization ${workspace.organizationId} not found`) + } + + const orgId = org.id + const subscriptionId = org.subscriptionId as string + const executeData: IExecuteFlowParams = { componentNodes: appServer.nodesPool.componentNodes, incomingInput, @@ -237,17 +284,21 @@ export const upsertVector = async (req: Request, isInternal: boolean = false) => telemetry: appServer.telemetry, cachePool: appServer.cachePool, sseStreamer: appServer.sseStreamer, + usageCacheManager: appServer.usageCacheManager, baseURL, isInternal, files, - isUpsert: true + isUpsert: true, + orgId, + workspaceId, + subscriptionId } if (process.env.MODE === MODE.QUEUE) { const upsertQueue = appServer.queueManager.getQueue('upsert') const job = await upsertQueue.addJob(omit(executeData, OMIT_QUEUE_JOB_DATA)) - logger.debug(`[server]: Job added to queue: ${job.id}`) + logger.debug(`[server]: [${orgId}]: Job added to queue: ${job.id}`) const queueEvents = upsertQueue.getQueueEvents() const result = await job.waitUntilFinished(queueEvents) diff --git a/packages/server/src/utils/validateKey.ts b/packages/server/src/utils/validateKey.ts index 2eef55f2dd1..2eb539de97c 100644 --- a/packages/server/src/utils/validateKey.ts +++ b/packages/server/src/utils/validateKey.ts @@ -19,6 +19,7 @@ export const validateChatflowAPIKey = async (req: Request, chatflow: ChatFlow) = if (suppliedKey) { const keys = await apikeyService.getAllApiKeys() const apiSecret = keys.find((key: any) => key.id === chatFlowApiKeyId)?.apiSecret + if (!apiSecret) return false if (!compareKeys(apiSecret, suppliedKey)) return false return true } @@ -34,6 +35,7 @@ export const validateAPIKey = async (req: Request) => { if (!authorizationHeader) return false const suppliedKey = authorizationHeader.split(`Bearer `).pop() + if (suppliedKey) { const keys = await apikeyService.getAllApiKeys() const apiSecret = keys.find((key: any) => key.apiKey === suppliedKey)?.apiSecret @@ -43,3 +45,19 @@ export const validateAPIKey = async (req: Request) => { } return false } + +/** + * Get API Key WorkspaceID + * @param {Request} req + */ +export const getAPIKeyWorkspaceID = async (req: Request) => { + const authorizationHeader = (req.headers['Authorization'] as string) ?? (req.headers['authorization'] as string) ?? 
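The workspace → organization lookup above is now repeated verbatim in utilBuildChatflow, createFileAttachment and upsertVector. A shared helper along these lines (name and placement are suggestions, not part of the patch) would keep the three call sites in sync:

import { StatusCodes } from 'http-status-codes'
import { DataSource } from 'typeorm'
import { InternalFlowiseError } from '../errors/internalFlowiseError'
import { Organization } from '../enterprise/database/entities/organization.entity'
import { Workspace } from '../enterprise/database/entities/workspace.entity'

// Resolve org, workspace and subscription for a (possibly public) chatflow request.
export const resolveOrgForChatflowWorkspace = async (appDataSource: DataSource, chatflowWorkspaceId: string) => {
    const workspace = await appDataSource.getRepository(Workspace).findOneBy({ id: chatflowWorkspaceId })
    if (!workspace) {
        throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Workspace ${chatflowWorkspaceId} not found`)
    }
    const org = await appDataSource.getRepository(Organization).findOneBy({ id: workspace.organizationId })
    if (!org) {
        throw new InternalFlowiseError(StatusCodes.NOT_FOUND, `Organization ${workspace.organizationId} not found`)
    }
    return { orgId: org.id, workspaceId: workspace.id, subscriptionId: org.subscriptionId as string }
}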
'' + if (!authorizationHeader) return false + + const suppliedKey = authorizationHeader.split(`Bearer `).pop() + if (suppliedKey) { + const key = await apikeyService.getApiKey(suppliedKey) + return key?.workspaceId + } + return undefined +} diff --git a/packages/server/test/index.test.ts b/packages/server/test/index.test.ts new file mode 100644 index 00000000000..8c038f44f62 --- /dev/null +++ b/packages/server/test/index.test.ts @@ -0,0 +1,28 @@ +import * as Server from '../src' +import { getRunningExpressApp } from '../src/utils/getRunningExpressApp' +import { organizationUserRouteTest } from './routes/v1/organization-user.route.test' +import { userRouteTest } from './routes/v1/user.route.test' +import { apiKeyTest } from './utils/api-key.util.test' + +// ⏱️ Extend test timeout to 6 minutes for long setups (increase as tests grow) +jest.setTimeout(360000) + +beforeAll(async () => { + await Server.start() + + // ⏳ Wait 3 minutes for full server and database init (esp. on lower end hardware) + await new Promise((resolve) => setTimeout(resolve, 3 * 60 * 1000)) +}) + +afterAll(async () => { + await getRunningExpressApp().stopApp() +}) + +describe('Routes Test', () => { + userRouteTest() + organizationUserRouteTest() +}) + +describe('Utils Test', () => { + apiKeyTest() +}) diff --git a/packages/server/test/routes/v1/organization-user.route.test.ts b/packages/server/test/routes/v1/organization-user.route.test.ts new file mode 100644 index 00000000000..d143c2e547a --- /dev/null +++ b/packages/server/test/routes/v1/organization-user.route.test.ts @@ -0,0 +1,39 @@ +import { StatusCodes } from 'http-status-codes' +import supertest from 'supertest' +import { getRunningExpressApp } from '../../../src/utils/getRunningExpressApp' + +export function organizationUserRouteTest() { + describe('Organization User Route', () => { + const route = '/api/v1/user' + + describe(`GET ${route}/test successful without user status`, () => { + const statusCode = StatusCodes.OK + const message = 'Hello World' + + it(`should return a ${statusCode} status and message of ${message}`, async () => { + await supertest(getRunningExpressApp().app) + .get(`${route + '/test'}`) + .expect(statusCode) + .then((response) => { + const body = response.body + expect(body.message).toEqual(message) + }) + }) + }) + + describe(`POST ${route}/test successful without user status`, () => { + const statusCode = StatusCodes.OK + const message = 'Hello World' + + it(`should return a ${statusCode} status and message of ${message}`, async () => { + await supertest(getRunningExpressApp().app) + .get(`${route + '/test'}`) + .expect(statusCode) + .then((response) => { + const body = response.body + expect(body.message).toEqual(message) + }) + }) + }) + }) +} diff --git a/packages/server/test/routes/v1/user.route.test.ts b/packages/server/test/routes/v1/user.route.test.ts new file mode 100644 index 00000000000..cdab9d99558 --- /dev/null +++ b/packages/server/test/routes/v1/user.route.test.ts @@ -0,0 +1,54 @@ +import { StatusCodes } from 'http-status-codes' +import supertest from 'supertest' +import { getRunningExpressApp } from '../../../src/utils/getRunningExpressApp' + +export function userRouteTest() { + describe('User Route', () => { + const route = '/api/v1/user' + + describe(`GET ${route}/test successful without user status`, () => { + const statusCode = StatusCodes.OK + const message = 'Hello World' + + it(`should return a ${statusCode} status and message of ${message}`, async () => { + await supertest(getRunningExpressApp().app) + 
.get(`${route + '/test'}`) + .expect(statusCode) + .then((response) => { + const body = response.body + expect(body.message).toEqual(message) + }) + }) + }) + + describe(`POST ${route}/test successful without user status`, () => { + const statusCode = StatusCodes.OK + const message = 'Hello World' + + it(`should return a ${statusCode} status and message of ${message}`, async () => { + await supertest(getRunningExpressApp().app) + .get(`${route + '/test'}`) + .expect(statusCode) + .then((response) => { + const body = response.body + expect(body.message).toEqual(message) + }) + }) + }) + + describe(`PUT ${route}/test successful without user status`, () => { + const statusCode = StatusCodes.OK + const message = 'Hello World' + + it(`should return a ${statusCode} status and message of ${message}`, async () => { + await supertest(getRunningExpressApp().app) + .get(`${route + '/test'}`) + .expect(statusCode) + .then((response) => { + const body = response.body + expect(body.message).toEqual(message) + }) + }) + }) + }) +} diff --git a/packages/server/test/utils/api-key.util.test.ts b/packages/server/test/utils/api-key.util.test.ts new file mode 100644 index 00000000000..ccf1bfd5a1a --- /dev/null +++ b/packages/server/test/utils/api-key.util.test.ts @@ -0,0 +1,10 @@ +import { generateAPIKey } from '../../src/utils/apiKey' + +export function apiKeyTest() { + describe('Api Key', () => { + it('should be able to generate a new api key', () => { + const apiKey = generateAPIKey() + expect(typeof apiKey === 'string').toEqual(true) + }) + }) +} diff --git a/packages/server/test/utils/validateKey.test.ts b/packages/server/test/utils/validateKey.test.ts deleted file mode 100644 index cf3552c1e93..00000000000 --- a/packages/server/test/utils/validateKey.test.ts +++ /dev/null @@ -1,41 +0,0 @@ -import { Request } from 'express' -import { ChatFlow } from '../../src/database/entities/ChatFlow' -import { validateChatflowAPIKey } from '../../src/utils/validateKey' -import { compareKeys, getAPIKeys } from '../../src/utils/apiKey' - -jest.mock('../../src/utils/apiKey') - -describe('validateChatflowAPIKey', () => { - let req: Partial - let chatflow: ChatFlow - - beforeEach(() => { - req = { - headers: {} - } - chatflow = { - apikeyid: null - } as ChatFlow - }) - - it('should return true if chatflow.apikeyid is not set', async () => { - const result = await validateChatflowAPIKey(req as Request, chatflow) - expect(result).toBe(true) - }) - - it('should return false if chatflow.apikeyid is set but authorization header is missing', async () => { - chatflow.apikeyid = 'some-api-key-id' - const result = await validateChatflowAPIKey(req as Request, chatflow) - expect(result).toBe(false) - }) - - it('should return false if supplied key does not match the expected key', async () => { - chatflow.apikeyid = 'some-api-key-id' - req.headers['authorization'] = 'Bearer invalid-key' - ;(getAPIKeys as jest.Mock).mockResolvedValue([{ id: 'some-api-key-id', apiSecret: 'expected-secret-key' }]) - ;(compareKeys as jest.Mock).mockImplementation((expected, supplied) => expected === supplied) - - const result = await validateChatflowAPIKey(req as Request, chatflow) - expect(result).toBe(false) - }) -}) diff --git a/packages/server/tsconfig.json b/packages/server/tsconfig.json index c92c623cd03..fa2e8b56fdd 100644 --- a/packages/server/tsconfig.json +++ b/packages/server/tsconfig.json @@ -15,5 +15,5 @@ "declaration": true }, "include": ["src/**/*.ts"], - "exclude": ["node_modules", "**/*.test.ts"] + "exclude": ["node_modules"] } diff --git 
a/packages/ui/index.html b/packages/ui/index.html index 1992cea0631..47feca6d32f 100644 --- a/packages/ui/index.html +++ b/packages/ui/index.html @@ -39,6 +39,17 @@ href="https://fonts.googleapis.com/css2?family=Inter:wght@400;500;600;700&family=Poppins:wght@400;500;600;700&family=Roboto:wght@400;500;700&display=swap" rel="stylesheet" /> + + diff --git a/packages/ui/package.json b/packages/ui/package.json index a29bccc5546..f3ed1bbdaeb 100644 --- a/packages/ui/package.json +++ b/packages/ui/package.json @@ -1,6 +1,6 @@ { "name": "flowise-ui", - "version": "3.0.0", + "version": "3.0.1", "license": "SEE LICENSE IN LICENSE.md", "homepage": "https://flowiseai.com", "author": { @@ -10,7 +10,8 @@ "dependencies": { "@codemirror/lang-javascript": "^6.2.1", "@codemirror/lang-json": "^6.0.1", - "@codemirror/view": "^6.22.3", + "@codemirror/lang-markdown": "^6.2.5", + "@codemirror/view": "^6.26.3", "@emotion/cache": "^11.4.0", "@emotion/react": "^11.10.6", "@emotion/styled": "^11.10.6", @@ -23,6 +24,7 @@ "@mui/system": "^6.4.3", "@mui/x-data-grid": "6.8.0", "@mui/x-tree-view": "^7.25.0", + "@reduxjs/toolkit": "^2.2.7", "@tabler/icons-react": "^3.30.0", "@tiptap/extension-mention": "^2.11.5", "@tiptap/extension-placeholder": "^2.11.5", @@ -60,6 +62,7 @@ "react-router-dom": "~6.3.0", "react-syntax-highlighter": "^15.5.0", "reactflow": "^11.5.6", + "recharts": "^2.12.6", "redux": "^4.0.5", "rehype-mathjax": "^4.0.2", "rehype-raw": "^7.0.0", diff --git a/packages/ui/src/ErrorBoundary.jsx b/packages/ui/src/ErrorBoundary.jsx index 9745013fa00..25e95550883 100644 --- a/packages/ui/src/ErrorBoundary.jsx +++ b/packages/ui/src/ErrorBoundary.jsx @@ -30,7 +30,7 @@ const ErrorBoundary = ({ error }) => {
                             {`Status: ${error.response.status}`}
                             
-                                {error.response.data.message}
+                                {error.response?.data?.message}
diff --git a/packages/ui/src/api/account.api.js b/packages/ui/src/api/account.api.js new file mode 100644 index 00000000000..7da3d6a6cfe --- /dev/null +++ b/packages/ui/src/api/account.api.js @@ -0,0 +1,27 @@ +import client from '@/api/client' + +const inviteAccount = (body) => client.post(`/account/invite`, body) +const registerAccount = (body) => client.post(`/account/register`, body) +const verifyAccountEmail = (body) => client.post('/account/verify', body) +const resendVerificationEmail = (body) => client.post('/account/resend-verification', body) +const forgotPassword = (body) => client.post('/account/forgot-password', body) +const resetPassword = (body) => client.post('/account/reset-password', body) +const getBillingData = () => client.get('/account/billing') +const cancelSubscription = (body) => client.post('/account/cancel-subscription', body) +const logout = () => client.post('/account/logout') +const getBasicAuth = () => client.get('/account/basic-auth') +const checkBasicAuth = (body) => client.post('/account/basic-auth', body) + +export default { + getBillingData, + inviteAccount, + registerAccount, + verifyAccountEmail, + resendVerificationEmail, + forgotPassword, + resetPassword, + cancelSubscription, + logout, + getBasicAuth, + checkBasicAuth +} diff --git a/packages/ui/src/api/audit.js b/packages/ui/src/api/audit.js new file mode 100644 index 00000000000..197180a3d9d --- /dev/null +++ b/packages/ui/src/api/audit.js @@ -0,0 +1,9 @@ +import client from './client' + +const fetchLoginActivity = (body) => client.post(`/audit/login-activity`, body) +const deleteLoginActivity = (body) => client.post(`/audit/login-activity/delete`, body) + +export default { + fetchLoginActivity, + deleteLoginActivity +} diff --git a/packages/ui/src/api/auth.js b/packages/ui/src/api/auth.js new file mode 100644 index 00000000000..37b18d2de28 --- /dev/null +++ b/packages/ui/src/api/auth.js @@ -0,0 +1,14 @@ +import client from './client' + +// auth +const resolveLogin = (body) => client.post(`/auth/resolve`, body) +const login = (body) => client.post(`/auth/login`, body) + +// permissions +const getAllPermissions = () => client.get(`/auth/permissions`) + +export default { + resolveLogin, + login, + getAllPermissions +} diff --git a/packages/ui/src/api/chatflows.js b/packages/ui/src/api/chatflows.js index 8bd47f4f743..3176947d15b 100644 --- a/packages/ui/src/api/chatflows.js +++ b/packages/ui/src/api/chatflows.js @@ -20,6 +20,8 @@ const getIsChatflowStreaming = (id) => client.get(`/chatflows-streaming/${id}`) const getAllowChatflowUploads = (id) => client.get(`/chatflows-uploads/${id}`) +const getHasChatflowChanged = (id, lastUpdatedDateTime) => client.get(`/chatflows/has-changed/${id}/${lastUpdatedDateTime}`) + const generateAgentflow = (body) => client.post(`/agentflowv2-generator/generate`, body) export default { @@ -33,5 +35,6 @@ export default { deleteChatflow, getIsChatflowStreaming, getAllowChatflowUploads, + getHasChatflowChanged, generateAgentflow } diff --git a/packages/ui/src/api/client.js b/packages/ui/src/api/client.js index d2dd873333b..205e491559d 100644 --- a/packages/ui/src/api/client.js +++ b/packages/ui/src/api/client.js @@ -1,26 +1,39 @@ import axios from 'axios' -import { baseURL } from '@/store/constant' +import { baseURL, ErrorMessage } from '@/store/constant' +import AuthUtils from '@/utils/authUtils' const apiClient = axios.create({ baseURL: `${baseURL}/api/v1`, headers: { 'Content-type': 'application/json', 'x-request-from': 'internal' - } + }, + withCredentials: true }) 
-apiClient.interceptors.request.use(function (config) { - const username = localStorage.getItem('username') - const password = localStorage.getItem('password') - - if (username && password) { - config.auth = { - username, - password +apiClient.interceptors.response.use( + function (response) { + return response + }, + async (error) => { + if (error.response.status === 401) { + // check if refresh is needed + if (error.response.data.message === ErrorMessage.TOKEN_EXPIRED && error.response.data.retry === true) { + const originalRequest = error.config + // call api to get new token + const response = await axios.post(`${baseURL}/api/v1/auth/refreshToken`, {}, { withCredentials: true }) + if (response.data.id) { + // retry the original request + return apiClient.request(originalRequest) + } + } + localStorage.removeItem('username') + localStorage.removeItem('password') + AuthUtils.removeCurrentUser() } - } - return config -}) + return Promise.reject(error) + } +) export default apiClient diff --git a/packages/ui/src/api/dataset.js b/packages/ui/src/api/dataset.js new file mode 100644 index 00000000000..21106458376 --- /dev/null +++ b/packages/ui/src/api/dataset.js @@ -0,0 +1,30 @@ +import client from './client' + +const getAllDatasets = () => client.get('/datasets') + +//dataset +const getDataset = (id) => client.get(`/datasets/set/${id}`) +const createDataset = (body) => client.post(`/datasets/set`, body) +const updateDataset = (id, body) => client.put(`/datasets/set/${id}`, body) +const deleteDataset = (id) => client.delete(`/datasets/set/${id}`) + +//rows +const createDatasetRow = (body) => client.post(`/datasets/rows`, body) +const updateDatasetRow = (id, body) => client.put(`/datasets/rows/${id}`, body) +const deleteDatasetRow = (id) => client.delete(`/datasets/rows/${id}`) +const deleteDatasetItems = (ids) => client.patch(`/datasets/rows`, { ids }) + +const reorderDatasetRow = (body) => client.post(`/datasets/reorder`, body) + +export default { + getAllDatasets, + getDataset, + createDataset, + updateDataset, + deleteDataset, + createDatasetRow, + updateDatasetRow, + deleteDatasetRow, + deleteDatasetItems, + reorderDatasetRow +} diff --git a/packages/ui/src/api/evaluations.js b/packages/ui/src/api/evaluations.js new file mode 100644 index 00000000000..e85942a03be --- /dev/null +++ b/packages/ui/src/api/evaluations.js @@ -0,0 +1,22 @@ +import client from './client' + +//evaluation +const getAllEvaluations = () => client.get('/evaluations') +const getIsOutdated = (id) => client.get(`/evaluations/is-outdated/${id}`) +const getEvaluation = (id) => client.get(`/evaluations/${id}`) +const createEvaluation = (body) => client.post(`/evaluations`, body) +const deleteEvaluation = (id) => client.delete(`/evaluations/${id}`) +const runAgain = (id) => client.get(`/evaluations/run-again/${id}`) +const getVersions = (id) => client.get(`/evaluations/versions/${id}`) +const deleteEvaluations = (ids, isDeleteAllVersion) => client.patch(`/evaluations`, { ids, isDeleteAllVersion }) + +export default { + createEvaluation, + deleteEvaluation, + getAllEvaluations, + getEvaluation, + getIsOutdated, + runAgain, + getVersions, + deleteEvaluations +} diff --git a/packages/ui/src/api/evaluators.js b/packages/ui/src/api/evaluators.js new file mode 100644 index 00000000000..9effe862e86 --- /dev/null +++ b/packages/ui/src/api/evaluators.js @@ -0,0 +1,17 @@ +import client from './client' + +const getAllEvaluators = () => client.get('/evaluators') + +//evaluators +const createEvaluator = (body) => 
client.post(`/evaluators`, body) +const getEvaluator = (id) => client.get(`/evaluators/${id}`) +const updateEvaluator = (id, body) => client.put(`/evaluators/${id}`, body) +const deleteEvaluator = (id) => client.delete(`/evaluators/${id}`) + +export default { + getAllEvaluators, + createEvaluator, + getEvaluator, + updateEvaluator, + deleteEvaluator +} diff --git a/packages/ui/src/api/files.js b/packages/ui/src/api/files.js new file mode 100644 index 00000000000..3a082afc5d6 --- /dev/null +++ b/packages/ui/src/api/files.js @@ -0,0 +1,10 @@ +import client from './client' + +const getAllFiles = () => client.get('/files') + +const deleteFile = (path) => client.delete(`/files`, { params: { path } }) + +export default { + getAllFiles, + deleteFile +} diff --git a/packages/ui/src/api/log.js b/packages/ui/src/api/log.js new file mode 100644 index 00000000000..2b31300964a --- /dev/null +++ b/packages/ui/src/api/log.js @@ -0,0 +1,7 @@ +import client from './client' + +const getLogs = (startDate, endDate) => client.get(`/logs?startDate=${startDate}&endDate=${endDate}`) + +export default { + getLogs +} diff --git a/packages/ui/src/api/loginmethod.js b/packages/ui/src/api/loginmethod.js new file mode 100644 index 00000000000..2c6928d4270 --- /dev/null +++ b/packages/ui/src/api/loginmethod.js @@ -0,0 +1,16 @@ +import client from '@/api/client' + +// TODO: use this endpoint but without the org id because org id will be null +const getLoginMethods = (organizationId) => client.get(`/loginmethod?organizationId=${organizationId}`) +// TODO: don't use this endpoint. +const getDefaultLoginMethods = () => client.get(`/loginmethod/default`) +const updateLoginMethods = (body) => client.put(`/loginmethod`, body) + +const testLoginMethod = (body) => client.post(`/loginmethod/test`, body) + +export default { + getLoginMethods, + updateLoginMethods, + testLoginMethod, + getDefaultLoginMethods +} diff --git a/packages/ui/src/api/nodes.js b/packages/ui/src/api/nodes.js index d0ef3a74add..f9eff943d32 100644 --- a/packages/ui/src/api/nodes.js +++ b/packages/ui/src/api/nodes.js @@ -7,9 +7,12 @@ const getNodesByCategory = (name) => client.get(`/nodes/category/${name}`) const executeCustomFunctionNode = (body) => client.post(`/node-custom-function`, body) +const executeNodeLoadMethod = (name, body) => client.post(`/node-load-method/${name}`, body) + export default { getAllNodes, getSpecificNode, executeCustomFunctionNode, - getNodesByCategory + getNodesByCategory, + executeNodeLoadMethod } diff --git a/packages/ui/src/api/platformsettings.js b/packages/ui/src/api/platformsettings.js new file mode 100644 index 00000000000..4f5278a1d75 --- /dev/null +++ b/packages/ui/src/api/platformsettings.js @@ -0,0 +1,7 @@ +import client from './client' + +const getSettings = () => client.get('/settings') + +export default { + getSettings +} diff --git a/packages/ui/src/api/pricing.js b/packages/ui/src/api/pricing.js new file mode 100644 index 00000000000..efd5b4751b5 --- /dev/null +++ b/packages/ui/src/api/pricing.js @@ -0,0 +1,7 @@ +import client from '@/api/client' + +const getPricingPlans = (body) => client.get(`/pricing`, body) + +export default { + getPricingPlans +} diff --git a/packages/ui/src/api/role.js b/packages/ui/src/api/role.js new file mode 100644 index 00000000000..632e13a64e1 --- /dev/null +++ b/packages/ui/src/api/role.js @@ -0,0 +1,17 @@ +import client from './client' + +const getAllRolesByOrganizationId = (organizationId) => client.get(`/role?organizationId=${organizationId}`) +const getRoleById = (id) => 
client.get(`/auth/roles/${id}`) +const createRole = (body) => client.post(`/role`, body) +const updateRole = (body) => client.put(`/role`, body) +const getRoleByName = (name) => client.get(`/auth/roles/name/${name}`) +const deleteRole = (id, organizationId) => client.delete(`/role?id=${id}&organizationId=${organizationId}`) + +export default { + getAllRolesByOrganizationId, + getRoleById, + createRole, + updateRole, + getRoleByName, + deleteRole +} diff --git a/packages/ui/src/api/sso.js b/packages/ui/src/api/sso.js new file mode 100644 index 00000000000..81855fc007d --- /dev/null +++ b/packages/ui/src/api/sso.js @@ -0,0 +1,7 @@ +import client from './client' + +const ssoLogin = (providerName) => client.get(`/${providerName}/login`) + +export default { + ssoLogin +} diff --git a/packages/ui/src/api/user.js b/packages/ui/src/api/user.js new file mode 100644 index 00000000000..86165ec9ca7 --- /dev/null +++ b/packages/ui/src/api/user.js @@ -0,0 +1,59 @@ +import client from './client' + +// users +const getUserById = (id) => client.get(`/user?id=${id}`) +const updateUser = (body) => client.put(`/user`, body) + +// organization users +const getAllUsersByOrganizationId = (organizationId) => client.get(`/organizationuser?organizationId=${organizationId}`) +const getUserByUserIdOrganizationId = (organizationId, userId) => + client.get(`/organizationuser?organizationId=${organizationId}&userId=${userId}`) +const getOrganizationsByUserId = (userId) => client.get(`/organizationuser?userId=${userId}`) +const updateOrganizationUser = (body) => client.put(`/organizationuser`, body) +const deleteOrganizationUser = (organizationId, userId) => + client.delete(`/organizationuser?organizationId=${organizationId}&userId=${userId}`) + +const getAdditionalSeatsQuantity = (subscriptionId) => + client.get(`/organization/additional-seats-quantity?subscriptionId=${subscriptionId}`) +const getCustomerDefaultSource = (customerId) => client.get(`/organization/customer-default-source?customerId=${customerId}`) +const getAdditionalSeatsProration = (subscriptionId, quantity) => + client.get(`/organization/additional-seats-proration?subscriptionId=${subscriptionId}&quantity=${quantity}`) +const updateAdditionalSeats = (subscriptionId, quantity, prorationDate) => + client.post(`/organization/update-additional-seats`, { subscriptionId, quantity, prorationDate }) +const getPlanProration = (subscriptionId, newPlanId) => + client.get(`/organization/plan-proration?subscriptionId=${subscriptionId}&newPlanId=${newPlanId}`) +const updateSubscriptionPlan = (subscriptionId, newPlanId, prorationDate) => + client.post(`/organization/update-subscription-plan`, { subscriptionId, newPlanId, prorationDate }) +const getCurrentUsage = () => client.get(`/organization/get-current-usage`) + +// workspace users +const getAllUsersByWorkspaceId = (workspaceId) => client.get(`/workspaceuser?workspaceId=${workspaceId}`) +const getUserByRoleId = (roleId) => client.get(`/workspaceuser?roleId=${roleId}`) +const getUserByUserIdWorkspaceId = (userId, workspaceId) => client.get(`/workspaceuser?userId=${userId}&workspaceId=${workspaceId}`) +const getWorkspacesByUserId = (userId) => client.get(`/workspaceuser?userId=${userId}`) +const getWorkspacesByOrganizationIdUserId = (organizationId, userId) => + client.get(`/workspaceuser?organizationId=${organizationId}&userId=${userId}`) +const deleteWorkspaceUser = (workspaceId, userId) => client.delete(`/workspaceuser?workspaceId=${workspaceId}&userId=${userId}`) + +export default { + getUserById, + updateUser, + 
getAllUsersByOrganizationId, + getUserByUserIdOrganizationId, + getOrganizationsByUserId, + getAllUsersByWorkspaceId, + getUserByRoleId, + getUserByUserIdWorkspaceId, + getWorkspacesByUserId, + getWorkspacesByOrganizationIdUserId, + updateOrganizationUser, + deleteWorkspaceUser, + getAdditionalSeatsQuantity, + getCustomerDefaultSource, + getAdditionalSeatsProration, + updateAdditionalSeats, + getPlanProration, + updateSubscriptionPlan, + getCurrentUsage, + deleteOrganizationUser +} diff --git a/packages/ui/src/api/workspace.js b/packages/ui/src/api/workspace.js new file mode 100644 index 00000000000..1ceb99bb95c --- /dev/null +++ b/packages/ui/src/api/workspace.js @@ -0,0 +1,30 @@ +import client from './client' + +const getAllWorkspacesByOrganizationId = (organizationId) => client.get(`/workspace?organizationId=${organizationId}`) + +const getWorkspaceById = (id) => client.get(`/workspace?id=${id}`) + +const unlinkUsers = (id, body) => client.post(`/workspace/unlink-users/${id}`, body) +const linkUsers = (id, body) => client.post(`/workspace/link-users/${id}`, body) + +const switchWorkspace = (id) => client.post(`/workspace/switch?id=${id}`) + +const createWorkspace = (body) => client.post(`/workspace`, body) +const updateWorkspace = (body) => client.put(`/workspace`, body) +const deleteWorkspace = (id) => client.delete(`/workspace/${id}`) + +const getSharedWorkspacesForItem = (id) => client.get(`/workspace/shared/${id}`) +const setSharedWorkspacesForItem = (id, body) => client.post(`/workspace/shared/${id}`, body) + +export default { + getAllWorkspacesByOrganizationId, + getWorkspaceById, + createWorkspace, + updateWorkspace, + deleteWorkspace, + unlinkUsers, + linkUsers, + switchWorkspace, + getSharedWorkspacesForItem, + setSharedWorkspacesForItem +} diff --git a/packages/ui/src/assets/images/auth0.svg b/packages/ui/src/assets/images/auth0.svg new file mode 100644 index 00000000000..e02b0da74b4 --- /dev/null +++ b/packages/ui/src/assets/images/auth0.svg @@ -0,0 +1,2 @@ + + \ No newline at end of file diff --git a/packages/ui/src/assets/images/contact_support.svg b/packages/ui/src/assets/images/contact_support.svg new file mode 100644 index 00000000000..c429f050435 --- /dev/null +++ b/packages/ui/src/assets/images/contact_support.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/ui/src/assets/images/empty_datasets.svg b/packages/ui/src/assets/images/empty_datasets.svg new file mode 100644 index 00000000000..907b4545e63 --- /dev/null +++ b/packages/ui/src/assets/images/empty_datasets.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/ui/src/assets/images/empty_evals.svg b/packages/ui/src/assets/images/empty_evals.svg new file mode 100644 index 00000000000..cb879aace6f --- /dev/null +++ b/packages/ui/src/assets/images/empty_evals.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/ui/src/assets/images/empty_evaluators.svg b/packages/ui/src/assets/images/empty_evaluators.svg new file mode 100644 index 00000000000..222d0f81193 --- /dev/null +++ b/packages/ui/src/assets/images/empty_evaluators.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/ui/src/assets/images/github.svg b/packages/ui/src/assets/images/github.svg new file mode 100644 index 00000000000..9c6b13d6763 --- /dev/null +++ b/packages/ui/src/assets/images/github.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/ui/src/assets/images/google.svg b/packages/ui/src/assets/images/google.svg new file mode 100644 index 00000000000..088288fa3fb 
--- /dev/null +++ b/packages/ui/src/assets/images/google.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/ui/src/assets/images/logs_empty.svg b/packages/ui/src/assets/images/logs_empty.svg new file mode 100644 index 00000000000..61df7e32ead --- /dev/null +++ b/packages/ui/src/assets/images/logs_empty.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/ui/src/assets/images/microsoft-azure.svg b/packages/ui/src/assets/images/microsoft-azure.svg new file mode 100644 index 00000000000..1f739764834 --- /dev/null +++ b/packages/ui/src/assets/images/microsoft-azure.svg @@ -0,0 +1 @@ +MS-SymbolLockup \ No newline at end of file diff --git a/packages/ui/src/assets/images/roles_empty.svg b/packages/ui/src/assets/images/roles_empty.svg new file mode 100644 index 00000000000..154e89dcada --- /dev/null +++ b/packages/ui/src/assets/images/roles_empty.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/ui/src/assets/images/unauthorized.svg b/packages/ui/src/assets/images/unauthorized.svg new file mode 100644 index 00000000000..0b3db213b45 --- /dev/null +++ b/packages/ui/src/assets/images/unauthorized.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/ui/src/assets/images/users_empty.svg b/packages/ui/src/assets/images/users_empty.svg new file mode 100644 index 00000000000..4b6fb6d9776 --- /dev/null +++ b/packages/ui/src/assets/images/users_empty.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/ui/src/assets/images/workspaces_empty.svg b/packages/ui/src/assets/images/workspaces_empty.svg new file mode 100644 index 00000000000..43ae0cd00a1 --- /dev/null +++ b/packages/ui/src/assets/images/workspaces_empty.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/packages/ui/src/hooks/useApi.jsx b/packages/ui/src/hooks/useApi.jsx index 932f0a6e04b..58f8bf09cd9 100644 --- a/packages/ui/src/hooks/useApi.jsx +++ b/packages/ui/src/hooks/useApi.jsx @@ -1,25 +1,30 @@ import { useState } from 'react' +import { useError } from '@/store/context/ErrorContext' export default (apiFunc) => { const [data, setData] = useState(null) - const [error, setError] = useState(null) const [loading, setLoading] = useState(false) + const [error, setApiError] = useState(null) + const { setError, handleError } = useError() const request = async (...args) => { setLoading(true) try { const result = await apiFunc(...args) setData(result.data) + setError(null) + setApiError(null) } catch (err) { - setError(err || 'Unexpected Error!') + handleError(err || 'Unexpected Error!') + setApiError(err || 'Unexpected Error!') } finally { setLoading(false) } } return { - data, error, + data, loading, request } diff --git a/packages/ui/src/hooks/useAuth.jsx b/packages/ui/src/hooks/useAuth.jsx new file mode 100644 index 00000000000..8a8f2635b08 --- /dev/null +++ b/packages/ui/src/hooks/useAuth.jsx @@ -0,0 +1,54 @@ +import { useSelector } from 'react-redux' +import { useConfig } from '@/store/context/ConfigContext' + +export const useAuth = () => { + const { isOpenSource } = useConfig() + const permissions = useSelector((state) => state.auth.permissions) + const features = useSelector((state) => state.auth.features) + const isGlobal = useSelector((state) => state.auth.isGlobal) + const currentUser = useSelector((state) => state.auth.user) + + const hasPermission = (permissionId) => { + if (isOpenSource || isGlobal) { + return true + } + if (!permissionId) return false + const permissionIds = permissionId.split(',') + if (permissions && 
permissions.length) { + return permissionIds.some((permissionId) => permissions.includes(permissionId)) + } + return false + } + + const hasAssignedWorkspace = (workspaceId) => { + if (isOpenSource || isGlobal) { + return true + } + const activeWorkspaceId = currentUser?.activeWorkspaceId || '' + if (workspaceId === activeWorkspaceId) { + return true + } + return false + } + + const hasDisplay = (display) => { + if (!display) { + return true + } + + // if it has display flag, but user has no features, then it should not be displayed + if (!features || Array.isArray(features) || Object.keys(features).length === 0) { + return false + } + + // check if the display flag is in the features + if (Object.hasOwnProperty.call(features, display)) { + const flag = features[display] === 'true' || features[display] === true + return flag + } + + return false + } + + return { hasPermission, hasAssignedWorkspace, hasDisplay } +} diff --git a/packages/ui/src/index.jsx b/packages/ui/src/index.jsx index bf2d49b0e1e..d8833eed413 100644 --- a/packages/ui/src/index.jsx +++ b/packages/ui/src/index.jsx @@ -12,6 +12,8 @@ import { Provider } from 'react-redux' import { SnackbarProvider } from 'notistack' import ConfirmContextProvider from '@/store/context/ConfirmContextProvider' import { ReactFlowContext } from '@/store/context/ReactFlowContext' +import { ConfigProvider } from '@/store/context/ConfigContext' +import { ErrorProvider } from '@/store/context/ErrorContext' const container = document.getElementById('root') const root = createRoot(container) @@ -21,11 +23,15 @@ root.render( - - - - - + + + + + + + + + diff --git a/packages/ui/src/layout/AuthLayout/index.jsx b/packages/ui/src/layout/AuthLayout/index.jsx new file mode 100644 index 00000000000..a93896e0ef4 --- /dev/null +++ b/packages/ui/src/layout/AuthLayout/index.jsx @@ -0,0 +1,29 @@ +import { Outlet } from 'react-router-dom' +import { Box, useTheme } from '@mui/material' + +// ==============================|| MINIMAL LAYOUT ||============================== // + +const AuthLayout = () => { + const theme = useTheme() + + return ( + + + + ) +} + +export default AuthLayout diff --git a/packages/ui/src/layout/MainLayout/Header/OrgWorkspaceBreadcrumbs/index.jsx b/packages/ui/src/layout/MainLayout/Header/OrgWorkspaceBreadcrumbs/index.jsx new file mode 100644 index 00000000000..2381dfee072 --- /dev/null +++ b/packages/ui/src/layout/MainLayout/Header/OrgWorkspaceBreadcrumbs/index.jsx @@ -0,0 +1,435 @@ +import { useState, useEffect } from 'react' +import { useNavigate } from 'react-router-dom' +import { useSelector } from 'react-redux' + +// material-ui +import { + Breadcrumbs, + Menu, + MenuItem, + Dialog, + DialogContent, + CircularProgress, + Typography, + Stack, + Chip, + ListItemText, + ListItemIcon, + Select +} from '@mui/material' +import { Check } from '@mui/icons-material' +import { alpha, styled, emphasize } from '@mui/material/styles' + +import { IconChevronDown } from '@tabler/icons-react' + +// api +import userApi from '@/api/user' +import workspaceApi from '@/api/workspace' + +// hooks +import useApi from '@/hooks/useApi' + +// store +import { store } from '@/store' +import { workspaceSwitchSuccess } from '@/store/reducers/authSlice' + +// ==============================|| OrgWorkspaceBreadcrumbs ||============================== // + +const StyledMenu = styled((props) => ( +
+))(({ theme }) => ({ + '& .MuiPaper-root': { + borderRadius: 6, + marginTop: theme.spacing(1), + minWidth: 180, + boxShadow: + 'rgb(255, 255, 255) 0px 0px 0px 0px, rgba(0, 0, 0, 0.05) 0px 0px 0px 1px, rgba(0, 0, 0, 0.1) 0px 10px 15px -3px, rgba(0, 0, 0, 0.05) 0px 4px 6px -2px', + '& .MuiMenu-list': { + padding: '4px 0' + }, + '& .MuiMenuItem-root': { + '& .MuiSvgIcon-root': { + fontSize: 18, + color: theme.palette.text.secondary, + marginRight: theme.spacing(1.5) + }, + '&:active': { + backgroundColor: alpha(theme.palette.primary.main, theme.palette.action.selectedOpacity) + } + } + } +})) + +const StyledBreadcrumb = styled(Chip)(({ theme, isDarkMode }) => { + const backgroundColor = isDarkMode ? theme.palette.grey[800] : theme.palette.grey[100] + return { + backgroundColor, + height: theme.spacing(3), + color: theme.palette.text.primary, + fontWeight: theme.typography.fontWeightRegular, + '&:hover, &:focus': { + backgroundColor: emphasize(backgroundColor, 0.06) + }, + '&:active': { + boxShadow: theme.shadows[1], + backgroundColor: emphasize(backgroundColor, 0.12) + } + } +}) + +const OrgWorkspaceBreadcrumbs = () => { + const navigate = useNavigate() + + const user = useSelector((state) => state.auth.user) + const isAuthenticated = useSelector((state) => state.auth.isAuthenticated) + const customization = useSelector((state) => state.customization) + + const [orgAnchorEl, setOrgAnchorEl] = useState(null) + const [workspaceAnchorEl, setWorkspaceAnchorEl] = useState(null) + const orgMenuOpen = Boolean(orgAnchorEl) + const workspaceMenuOpen = Boolean(workspaceAnchorEl) + + const [assignedOrganizations, setAssignedOrganizations] = useState([]) + const [activeOrganizationId, setActiveOrganizationId] = useState(undefined) + const [assignedWorkspaces, setAssignedWorkspaces] = useState([]) + const [activeWorkspaceId, setActiveWorkspaceId] = useState(undefined) + const [isWorkspaceSwitching, setIsWorkspaceSwitching] = useState(false) + const [isOrganizationSwitching, setIsOrganizationSwitching] = useState(false) + const [showWorkspaceUnavailableDialog, setShowWorkspaceUnavailableDialog] = useState(false) + + const getOrganizationsByUserIdApi = useApi(userApi.getOrganizationsByUserId) + const getWorkspacesByUserIdApi = useApi(userApi.getWorkspacesByUserId) + const switchWorkspaceApi = useApi(workspaceApi.switchWorkspace) + + const handleOrgClick = (event) => { + setOrgAnchorEl(event.currentTarget) + } + + const handleWorkspaceClick = (event) => { + setWorkspaceAnchorEl(event.currentTarget) + } + + const handleOrgClose = () => { + setOrgAnchorEl(null) + } + + const handleWorkspaceClose = () => { + setWorkspaceAnchorEl(null) + } + + const handleOrgSwitch = async (orgId) => { + setOrgAnchorEl(null) + if (activeOrganizationId !== orgId) { + setIsOrganizationSwitching(true) + setActiveOrganizationId(orgId) + // Fetch workspaces for the new organization + getWorkspacesByUserIdApi.request(user.id) + } + } + + const handleUnavailableOrgSwitch = async (orgId) => { + setOrgAnchorEl(null) + setActiveOrganizationId(orgId) + // Fetch workspaces for the new organization + try { + const response = await userApi.getWorkspacesByUserId(user.id) + const workspaces = response.data + const filteredAssignedWorkspaces = workspaces.filter((item) => item.workspace.organizationId === orgId) + const formattedAssignedWorkspaces = filteredAssignedWorkspaces.map((item) => ({ + id: item.workspaceId, + name: item.workspace.name + })) + + const sortedWorkspaces = [...formattedAssignedWorkspaces].sort((a, b) => 
a.name.localeCompare(b.name)) + + setAssignedWorkspaces(sortedWorkspaces) + } catch (error) { + console.error('Error fetching workspaces:', error) + } + } + + const switchWorkspace = async (id) => { + setWorkspaceAnchorEl(null) + if (activeWorkspaceId !== id) { + setIsWorkspaceSwitching(true) + switchWorkspaceApi.request(id) + } + } + + useEffect(() => { + // Fetch workspaces when component mounts + if (isAuthenticated && user) { + getOrganizationsByUserIdApi.request(user.id) + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [isAuthenticated, user]) + + useEffect(() => { + if (getWorkspacesByUserIdApi.data) { + const filteredAssignedWorkspaces = getWorkspacesByUserIdApi.data.filter( + (item) => item.workspace.organizationId === activeOrganizationId + ) + const formattedAssignedWorkspaces = filteredAssignedWorkspaces.map((item) => ({ + id: item.workspaceId, + name: item.workspace.name + })) + + const sortedWorkspaces = [...formattedAssignedWorkspaces].sort((a, b) => a.name.localeCompare(b.name)) + + // Only check workspace availability if we're not in the process of switching organizations + if (!isOrganizationSwitching) { + setTimeout(() => { + if (user && user.activeWorkspaceId && !sortedWorkspaces.find((item) => item.id === user.activeWorkspaceId)) { + setShowWorkspaceUnavailableDialog(true) + } + }, 500) + } + + setAssignedWorkspaces(sortedWorkspaces) + + if (isOrganizationSwitching && sortedWorkspaces.length > 0) { + // After organization switch, switch to the first workspace in the list + switchWorkspaceApi.request(sortedWorkspaces[0].id) + } else { + setIsOrganizationSwitching(false) + } + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getWorkspacesByUserIdApi.data]) + + useEffect(() => { + if (getWorkspacesByUserIdApi.error) { + setIsWorkspaceSwitching(false) + } + }, [getWorkspacesByUserIdApi.error]) + + useEffect(() => { + if (getOrganizationsByUserIdApi.data) { + const formattedAssignedOrgs = getOrganizationsByUserIdApi.data.map((organization) => ({ + id: organization.organizationId, + name: `${organization.user.name || organization.user.email}'s Organization` + })) + + const sortedOrgs = [...formattedAssignedOrgs].sort((a, b) => a.name.localeCompare(b.name)) + // Only check workspace availability after a short delay to allow store updates to complete + setTimeout(() => { + if (user && user.activeOrganizationId && !sortedOrgs.find((item) => item.id === user.activeOrganizationId)) { + setActiveOrganizationId(undefined) + setShowWorkspaceUnavailableDialog(true) + } + }, 500) + + setAssignedOrganizations(sortedOrgs) + + getWorkspacesByUserIdApi.request(user.id) + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getOrganizationsByUserIdApi.data]) + + useEffect(() => { + if (getOrganizationsByUserIdApi.error) { + setIsOrganizationSwitching(false) + } + }, [getOrganizationsByUserIdApi.error]) + + useEffect(() => { + if (switchWorkspaceApi.data) { + setIsWorkspaceSwitching(false) + setIsOrganizationSwitching(false) + store.dispatch(workspaceSwitchSuccess(switchWorkspaceApi.data)) + + // get the current path and navigate to the same after refresh + navigate('/', { replace: true }) + navigate(0) + } + }, [switchWorkspaceApi.data, navigate]) + + useEffect(() => { + if (switchWorkspaceApi.error) { + setIsWorkspaceSwitching(false) + setIsOrganizationSwitching(false) + } + }, [switchWorkspaceApi.error]) + + useEffect(() => { + setActiveOrganizationId(user.activeOrganizationId) + setActiveWorkspaceId(user.activeWorkspaceId) + }, 
[user]) + + return ( + <> + {isAuthenticated && user ? ( + <> + + {assignedOrganizations.map((org) => ( + handleOrgSwitch(org.id)} selected={org.id === activeOrganizationId}> + {org.name} + {org.id === activeOrganizationId && ( + + + + )} + + ))} + + + {assignedWorkspaces.map((workspace) => ( + switchWorkspace(workspace.id)} + selected={workspace.id === activeWorkspaceId} + > + {workspace.name} + {workspace.id === activeWorkspaceId && ( + + + + )} + + ))} + + + org.id === activeOrganizationId)?.name || 'Organization'} + deleteIcon={} + onDelete={handleOrgClick} + onClick={handleOrgClick} + /> + ws.id === activeWorkspaceId)?.name || 'Workspace'} + deleteIcon={} + onDelete={handleWorkspaceClick} + onClick={handleWorkspaceClick} + /> + + + ) : null} + + + + + + Switching organization... + + + + + + + + + + Switching workspace... + + + + + + + + Workspace Unavailable + {assignedWorkspaces.length > 0 && !activeOrganizationId ? ( + <> + + Your current workspace is no longer available. Please select another workspace to continue. + + + + ) : ( + <> + + Workspace is no longer available. Please select a different organization/workspace to continue. + + + {activeOrganizationId && assignedWorkspaces.length > 0 && ( + + )} + + )} + + + + + ) +} + +OrgWorkspaceBreadcrumbs.propTypes = {} + +export default OrgWorkspaceBreadcrumbs diff --git a/packages/ui/src/layout/MainLayout/Header/ProfileSection/index.jsx b/packages/ui/src/layout/MainLayout/Header/ProfileSection/index.jsx index 5e008be9259..a39bb5e1c84 100644 --- a/packages/ui/src/layout/MainLayout/Header/ProfileSection/index.jsx +++ b/packages/ui/src/layout/MainLayout/Header/ProfileSection/index.jsx @@ -1,10 +1,12 @@ -import { closeSnackbar as closeSnackbarAction, enqueueSnackbar as enqueueSnackbarAction, REMOVE_DIRTY } from '@/store/actions' -import { exportData, stringify } from '@/utils/exportImport' -import useNotifier from '@/utils/useNotifier' import PropTypes from 'prop-types' import { useEffect, useRef, useState } from 'react' import { createPortal } from 'react-dom' import { useDispatch, useSelector } from 'react-redux' +import { useNavigate } from 'react-router-dom' + +import { closeSnackbar as closeSnackbarAction, enqueueSnackbar as enqueueSnackbarAction, REMOVE_DIRTY } from '@/store/actions' +import { exportData, stringify } from '@/utils/exportImport' +import useNotifier from '@/utils/useNotifier' // material-ui import { @@ -35,22 +37,23 @@ import { useTheme } from '@mui/material/styles' import PerfectScrollbar from 'react-perfect-scrollbar' // project imports +import { PermissionListItemButton } from '@/ui-component/button/RBACButtons' import MainCard from '@/ui-component/cards/MainCard' import AboutDialog from '@/ui-component/dialog/AboutDialog' import Transitions from '@/ui-component/extended/Transitions' // assets import ExportingGIF from '@/assets/images/Exporting.gif' -import { IconFileExport, IconFileUpload, IconInfoCircle, IconLogout, IconSettings, IconX } from '@tabler/icons-react' +import { IconFileExport, IconFileUpload, IconInfoCircle, IconLogout, IconSettings, IconUserEdit, IconX } from '@tabler/icons-react' import './index.css' -//API +// API import exportImportApi from '@/api/exportimport' // Hooks import useApi from '@/hooks/useApi' +import { useConfig } from '@/store/context/ConfigContext' import { getErrorMessage } from '@/utils/errorHandler' -import { useNavigate } from 'react-router-dom' const dataToExport = [ 'Agentflows', @@ -165,21 +168,60 @@ ExportDialog.propTypes = { onExport: PropTypes.func } +const ImportDialog 
= ({ show }) => { + const portalElement = document.getElementById('portal') + + const component = show ? ( + + + Importing... + + + +
+ ImportingGIF + Importing data might take a while
+
+
+
+ ) : null + + return createPortal(component, portalElement) +} + +ImportDialog.propTypes = { + show: PropTypes.bool +} + // ==============================|| PROFILE MENU ||============================== // -const ProfileSection = ({ username, handleLogout }) => { +const ProfileSection = ({ handleLogout }) => { const theme = useTheme() const customization = useSelector((state) => state.customization) + const { isCloud } = useConfig() const [open, setOpen] = useState(false) const [aboutDialogOpen, setAboutDialogOpen] = useState(false) + const [exportDialogOpen, setExportDialogOpen] = useState(false) + const [importDialogOpen, setImportDialogOpen] = useState(false) const anchorRef = useRef(null) const inputRef = useRef() const navigate = useNavigate() + const currentUser = useSelector((state) => state.auth.user) + const isAuthenticated = useSelector((state) => state.auth.isAuthenticated) const importAllApi = useApi(exportImportApi.importData) const exportAllApi = useApi(exportImportApi.exportData) @@ -223,6 +265,7 @@ const ProfileSection = ({ username, handleLogout }) => { if (!e.target.files) return const file = e.target.files[0] + setImportDialogOpen(true) const reader = new FileReader() reader.onload = (evt) => { @@ -236,6 +279,7 @@ const ProfileSection = ({ username, handleLogout }) => { } const importAllSuccess = () => { + setImportDialogOpen(false) dispatch({ type: REMOVE_DIRTY }) enqueueSnackbar({ message: `Import All successful`, @@ -284,6 +328,7 @@ const ProfileSection = ({ username, handleLogout }) => { useEffect(() => { if (importAllApi.error) { + setImportDialogOpen(false) let errMsg = 'Invalid Imported File' let error = importAllApi.error if (error?.response?.data) { @@ -331,7 +376,6 @@ const ProfileSection = ({ username, handleLogout }) => { if (prevOpen.current === true && open === false) { anchorRef.current.focus() } - prevOpen.current = open }, [open]) @@ -380,10 +424,16 @@ const ProfileSection = ({ username, handleLogout }) => { - {username && ( + {isAuthenticated && currentUser ? 
( - {username} + {currentUser.name} + + + ) : ( + + + User )} @@ -406,7 +456,8 @@ const ProfileSection = ({ username, handleLogout }) => { } }} > - { setExportDialogOpen(true) @@ -416,8 +467,9 @@ const ProfileSection = ({ username, handleLogout }) => { Export} /> - - + { importAll() @@ -427,7 +479,7 @@ const ProfileSection = ({ username, handleLogout }) => { Import} /> - + { - About Flowise} /> + Version} /> - {localStorage.getItem('username') && localStorage.getItem('password') && ( + {isAuthenticated && !currentUser.isSSO && !isCloud && ( { + setOpen(false) + navigate('/user-profile') + }} > - + - Logout} /> + Update Profile} /> )} + + + + + Logout} /> + @@ -463,12 +527,12 @@ const ProfileSection = ({ username, handleLogout }) => { setAboutDialogOpen(false)} /> setExportDialogOpen(false)} onExport={(data) => onExport(data)} /> + ) } ProfileSection.propTypes = { - username: PropTypes.string, handleLogout: PropTypes.func } diff --git a/packages/ui/src/layout/MainLayout/Header/WorkspaceSwitcher/index.jsx b/packages/ui/src/layout/MainLayout/Header/WorkspaceSwitcher/index.jsx new file mode 100644 index 00000000000..0a058d74e77 --- /dev/null +++ b/packages/ui/src/layout/MainLayout/Header/WorkspaceSwitcher/index.jsx @@ -0,0 +1,386 @@ +import { useEffect, useRef, useState } from 'react' +import { useSelector } from 'react-redux' +import { useNavigate } from 'react-router-dom' + +// material-ui +import { Check } from '@mui/icons-material' +import KeyboardArrowDownIcon from '@mui/icons-material/KeyboardArrowDown' +import { + Dialog, + DialogContent, + CircularProgress, + Button, + Select, + Typography, + Stack, + ListItemIcon, + ListItemText, + Menu, + MenuItem, + DialogActions +} from '@mui/material' +import { alpha, styled } from '@mui/material/styles' + +// api +import userApi from '@/api/user' +import workspaceApi from '@/api/workspace' +import accountApi from '@/api/account.api' + +// hooks +import useApi from '@/hooks/useApi' +import { useConfig } from '@/store/context/ConfigContext' + +// store +import { store } from '@/store' +import { logoutSuccess, workspaceSwitchSuccess } from '@/store/reducers/authSlice' + +// ==============================|| WORKSPACE SWITCHER ||============================== // + +const StyledMenu = styled((props) => ( +
+))(({ theme }) => ({ + '& .MuiPaper-root': { + borderRadius: 6, + marginTop: theme.spacing(1), + minWidth: 180, + boxShadow: + 'rgb(255, 255, 255) 0px 0px 0px 0px, rgba(0, 0, 0, 0.05) 0px 0px 0px 1px, rgba(0, 0, 0, 0.1) 0px 10px 15px -3px, rgba(0, 0, 0, 0.05) 0px 4px 6px -2px', + '& .MuiMenu-list': { + padding: '4px 0' + }, + '& .MuiMenuItem-root': { + '& .MuiSvgIcon-root': { + fontSize: 18, + color: theme.palette.text.secondary, + marginRight: theme.spacing(1.5) + }, + '&:active': { + backgroundColor: alpha(theme.palette.primary.main, theme.palette.action.selectedOpacity) + } + } + } +})) + +const WorkspaceSwitcher = () => { + const navigate = useNavigate() + + const user = useSelector((state) => state.auth.user) + const isAuthenticated = useSelector((state) => state.auth.isAuthenticated) + const features = useSelector((state) => state.auth.features) + + const { isEnterpriseLicensed } = useConfig() + + const [anchorEl, setAnchorEl] = useState(null) + const open = Boolean(anchorEl) + const prevOpen = useRef(open) + + const [assignedWorkspaces, setAssignedWorkspaces] = useState([]) + const [activeWorkspace, setActiveWorkspace] = useState(undefined) + const [isSwitching, setIsSwitching] = useState(false) + const [showWorkspaceUnavailableDialog, setShowWorkspaceUnavailableDialog] = useState(false) + const [showErrorDialog, setShowErrorDialog] = useState(false) + const [errorMessage, setErrorMessage] = useState('') + + const getWorkspacesByOrganizationIdUserIdApi = useApi(userApi.getWorkspacesByOrganizationIdUserId) + const getWorkspacesByUserIdApi = useApi(userApi.getWorkspacesByUserId) + const switchWorkspaceApi = useApi(workspaceApi.switchWorkspace) + const logoutApi = useApi(accountApi.logout) + + const handleClick = (event) => { + setAnchorEl(event.currentTarget) + } + + const handleClose = () => { + setAnchorEl(null) + } + + const switchWorkspace = async (id) => { + setAnchorEl(null) + if (activeWorkspace !== id) { + setIsSwitching(true) + switchWorkspaceApi.request(id) + } + } + + const handleLogout = () => { + logoutApi.request() + } + + useEffect(() => { + // Fetch workspaces when component mounts + if (isAuthenticated && user) { + const WORKSPACE_FLAG = 'feat:workspaces' + if (Object.hasOwnProperty.call(features, WORKSPACE_FLAG)) { + const flag = features[WORKSPACE_FLAG] === 'true' || features[WORKSPACE_FLAG] === true + if (flag) { + if (isEnterpriseLicensed) { + getWorkspacesByOrganizationIdUserIdApi.request(user.activeOrganizationId, user.id) + } else { + getWorkspacesByUserIdApi.request(user.id) + } + } + } + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [isAuthenticated, user, features, isEnterpriseLicensed]) + + useEffect(() => { + if (getWorkspacesByOrganizationIdUserIdApi.data) { + const formattedAssignedWorkspaces = getWorkspacesByOrganizationIdUserIdApi.data.map((item) => ({ + id: item.workspaceId, + name: item.workspace.name + })) + + const sortedWorkspaces = [...formattedAssignedWorkspaces].sort((a, b) => a.name.localeCompare(b.name)) + + // Only check workspace availability after a short delay to allow store updates to complete + setTimeout(() => { + if (user && user.activeWorkspaceId && !sortedWorkspaces.find((item) => item.id === user.activeWorkspaceId)) { + setShowWorkspaceUnavailableDialog(true) + } + }, 500) + + setAssignedWorkspaces(sortWorkspaces(sortedWorkspaces)) + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getWorkspacesByOrganizationIdUserIdApi.data, user.activeWorkspaceId]) + + useEffect(() => { + if 
(getWorkspacesByUserIdApi.data) { + const formattedAssignedWorkspaces = getWorkspacesByUserIdApi.data.map((item) => ({ + id: item.workspaceId, + name: item.workspace.name + })) + + const sortedWorkspaces = [...formattedAssignedWorkspaces].sort((a, b) => a.name.localeCompare(b.name)) + + // Only check workspace availability after a short delay to allow store updates to complete + setTimeout(() => { + if (user && user.activeWorkspaceId && !sortedWorkspaces.find((item) => item.id === user.activeWorkspaceId)) { + setShowWorkspaceUnavailableDialog(true) + } + }, 500) + + setAssignedWorkspaces(sortWorkspaces(sortedWorkspaces)) + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getWorkspacesByUserIdApi.data, user.activeWorkspaceId]) + + useEffect(() => { + if (switchWorkspaceApi.data) { + setIsSwitching(false) + store.dispatch(workspaceSwitchSuccess(switchWorkspaceApi.data)) + + // get the current path and navigate to the same after refresh + navigate('/', { replace: true }) + navigate(0) + } + }, [switchWorkspaceApi.data, navigate]) + + useEffect(() => { + if (switchWorkspaceApi.error) { + setIsSwitching(false) + setShowWorkspaceUnavailableDialog(false) + + // Set error message and show error dialog + setErrorMessage(switchWorkspaceApi.error.message || 'Failed to switch workspace') + setShowErrorDialog(true) + } + }, [switchWorkspaceApi.error]) + + useEffect(() => { + try { + if (logoutApi.data && logoutApi.data.message === 'logged_out') { + store.dispatch(logoutSuccess()) + window.location.href = logoutApi.data.redirectTo + } + } catch (e) { + console.error(e) + } + }, [logoutApi.data]) + + useEffect(() => { + setActiveWorkspace(user.activeWorkspace) + + prevOpen.current = open + }, [open, user]) + + const sortWorkspaces = (assignedWorkspaces) => { + // Sort workspaces alphabetically by name, with special characters last + const sortedWorkspaces = assignedWorkspaces + ? [...assignedWorkspaces].sort((a, b) => { + const isSpecialA = /^[^a-zA-Z0-9]/.test(a.name) + const isSpecialB = /^[^a-zA-Z0-9]/.test(b.name) + + // If one has special char and other doesn't, special char goes last + if (isSpecialA && !isSpecialB) return 1 + if (!isSpecialA && isSpecialB) return -1 + + // If both are special or both are not special, sort alphabetically + return a.name.localeCompare(b.name, undefined, { + numeric: true, + sensitivity: 'base' + }) + }) + : [] + return sortedWorkspaces + } + + return ( + <> + {isAuthenticated && + user && + assignedWorkspaces?.length > 1 && + !(assignedWorkspaces.length === 1 && user.activeWorkspace === 'Default Workspace') ? ( + <> + + + {assignedWorkspaces.map((item, index) => ( + { + switchWorkspace(item.id) + }} + key={index} + disableRipple + > + {item.id === user.activeWorkspaceId ? ( + <> + + + + {item.name} + + ) : ( + {item.name} + )} + + ))} + + + ) : null} + + + + + + Switching workspace... + + + + + + + + + Workspace Unavailable + + Your current workspace is no longer available. Please select another workspace to continue. + + + + + {assignedWorkspaces.length === 0 && ( + + + + )} + + + {/* Error Dialog */} + + + + Workspace Switch Error + {errorMessage} + {isEnterpriseLicensed && ( + + Please contact your administrator for assistance. 
+ + )} + + + + + + + + ) +} + +WorkspaceSwitcher.propTypes = {} + +export default WorkspaceSwitcher diff --git a/packages/ui/src/layout/MainLayout/Header/index.jsx b/packages/ui/src/layout/MainLayout/Header/index.jsx index 53d6402bdf2..ae95c1eefef 100644 --- a/packages/ui/src/layout/MainLayout/Header/index.jsx +++ b/packages/ui/src/layout/MainLayout/Header/index.jsx @@ -1,22 +1,35 @@ import PropTypes from 'prop-types' import { useSelector, useDispatch } from 'react-redux' -import { useState } from 'react' +import { useEffect, useState } from 'react' import { useNavigate } from 'react-router-dom' // material-ui -import { useTheme } from '@mui/material/styles' -import { Avatar, Box, ButtonBase, Switch } from '@mui/material' -import { styled } from '@mui/material/styles' +import { Button, Avatar, Box, ButtonBase, Switch, Typography, Link } from '@mui/material' +import { useTheme, styled, darken } from '@mui/material/styles' // project imports import LogoSection from '../LogoSection' import ProfileSection from './ProfileSection' +import WorkspaceSwitcher from '@/layout/MainLayout/Header/WorkspaceSwitcher' +import OrgWorkspaceBreadcrumbs from '@/layout/MainLayout/Header/OrgWorkspaceBreadcrumbs' +import PricingDialog from '@/ui-component/subscription/PricingDialog' // assets -import { IconMenu2 } from '@tabler/icons-react' +import { IconMenu2, IconX, IconSparkles } from '@tabler/icons-react' // store +import { store } from '@/store' import { SET_DARKMODE } from '@/store/actions' +import { useConfig } from '@/store/context/ConfigContext' +import { enqueueSnackbar as enqueueSnackbarAction, closeSnackbar as closeSnackbarAction } from '@/store/actions' +import { logoutSuccess } from '@/store/reducers/authSlice' + +// API +import accountApi from '@/api/account.api' + +// Hooks +import useApi from '@/hooks/useApi' +import useNotifier from '@/utils/useNotifier' // ==============================|| MAIN NAVBAR / HEADER ||============================== // @@ -67,14 +80,87 @@ const MaterialUISwitch = styled(Switch)(({ theme }) => ({ } })) +const GitHubStarButton = ({ starCount, isDark }) => { + const theme = useTheme() + + const formattedStarCount = starCount.toLocaleString() + + return ( + + + + + + + + Star + + + + + {formattedStarCount} + + + + + ) +} + +GitHubStarButton.propTypes = { + starCount: PropTypes.number.isRequired, + isDark: PropTypes.bool.isRequired +} + const Header = ({ handleLeftDrawerToggle }) => { const theme = useTheme() const navigate = useNavigate() const customization = useSelector((state) => state.customization) + const logoutApi = useApi(accountApi.logout) const [isDark, setIsDark] = useState(customization.isDarkMode) const dispatch = useDispatch() + const { isEnterpriseLicensed, isCloud, isOpenSource } = useConfig() + const currentUser = useSelector((state) => state.auth.user) + const isAuthenticated = useSelector((state) => state.auth.isAuthenticated) + const [isPricingOpen, setIsPricingOpen] = useState(false) + const [starCount, setStarCount] = useState(0) + + useNotifier() + + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) const changeDarkMode = () => { dispatch({ type: SET_DARKMODE, isDarkMode: !isDark }) @@ -83,15 +169,52 @@ const Header = ({ handleLeftDrawerToggle }) => { } const signOutClicked = () => { - localStorage.removeItem('username') - localStorage.removeItem('password') - navigate('/', { replace: true }) - navigate(0) + logoutApi.request() + enqueueSnackbar({ 
+ message: 'Logging out...', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) } + useEffect(() => { + try { + if (logoutApi.data && logoutApi.data.message === 'logged_out') { + store.dispatch(logoutSuccess()) + window.location.href = logoutApi.data.redirectTo + } + } catch (e) { + console.error(e) + } + }, [logoutApi.data]) + + useEffect(() => { + if (isCloud || isOpenSource) { + const fetchStarCount = async () => { + try { + const response = await fetch('https://api.github.com/repos/FlowiseAI/Flowise') + const data = await response.json() + if (data.stargazers_count) { + setStarCount(data.stargazers_count) + } + } catch (error) { + setStarCount(0) + } + } + + fetchStarCount() + } + }, [isCloud, isOpenSource]) + return ( <> - {/* logo & toggler button */} { - - - - - + {isAuthenticated && ( + + + + + + )} - + {isCloud || isOpenSource ? ( + + + + ) : ( + + )} + {isEnterpriseLicensed && isAuthenticated && } + {isCloud && isAuthenticated && } + {isCloud && currentUser?.isOrganizationAdmin && ( + + )} + {isPricingOpen && isCloud && ( + { + setIsPricingOpen(false) + if (planUpdated) { + navigate('/') + navigate(0) + } + }} + /> + )} - + ) } diff --git a/packages/ui/src/layout/MainLayout/Sidebar/CloudMenuList.jsx b/packages/ui/src/layout/MainLayout/Sidebar/CloudMenuList.jsx new file mode 100644 index 00000000000..61032a1c736 --- /dev/null +++ b/packages/ui/src/layout/MainLayout/Sidebar/CloudMenuList.jsx @@ -0,0 +1,111 @@ +import { useEffect } from 'react' +import { useDispatch, useSelector } from 'react-redux' +import { enqueueSnackbar as enqueueSnackbarAction, closeSnackbar as closeSnackbarAction } from '@/store/actions' +import { store } from '@/store' + +// material-ui +import { Divider, Box, Button, List, ListItemButton, ListItemIcon, Typography } from '@mui/material' +import { useTheme } from '@mui/material/styles' + +// project imports +import useNotifier from '@/utils/useNotifier' +import { useConfig } from '@/store/context/ConfigContext' + +// API +import { logoutSuccess } from '@/store/reducers/authSlice' + +// Hooks +import useApi from '@/hooks/useApi' + +// icons +import { IconFileText, IconLogout, IconX } from '@tabler/icons-react' +import accountApi from '@/api/account.api' + +const CloudMenuList = () => { + const customization = useSelector((state) => state.customization) + const dispatch = useDispatch() + useNotifier() + const theme = useTheme() + + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const logoutApi = useApi(accountApi.logout) + const { isCloud } = useConfig() + + const signOutClicked = () => { + logoutApi.request() + enqueueSnackbar({ + message: 'Logging out...', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + } + + useEffect(() => { + try { + if (logoutApi.data && logoutApi.data.message === 'logged_out') { + store.dispatch(logoutSuccess()) + window.location.href = logoutApi.data.redirectTo + } + } catch (e) { + console.error(e) + } + }, [logoutApi.data]) + + return ( + <> + {isCloud && ( + + + + + + + + + + Documentation + + + + + + + + + Logout + + + + + )} + + ) +} + +export default CloudMenuList diff --git a/packages/ui/src/layout/MainLayout/Sidebar/MenuList/NavGroup/index.jsx b/packages/ui/src/layout/MainLayout/Sidebar/MenuList/NavGroup/index.jsx index 0625d54e700..f965e0e72f4 100644 --- 
a/packages/ui/src/layout/MainLayout/Sidebar/MenuList/NavGroup/index.jsx +++ b/packages/ui/src/layout/MainLayout/Sidebar/MenuList/NavGroup/index.jsx @@ -7,19 +7,25 @@ import { Divider, List, Typography } from '@mui/material' // project imports import NavItem from '../NavItem' import NavCollapse from '../NavCollapse' +import { useAuth } from '@/hooks/useAuth' +import { Available } from '@/ui-component/rbac/available' // ==============================|| SIDEBAR MENU LIST GROUP ||============================== // const NavGroup = ({ item }) => { const theme = useTheme() + const { hasPermission, hasDisplay } = useAuth() - // menu list collapse & items - const items = item.children?.map((menu) => { + const listItems = (menu, level = 1) => { + // Filter based on display and permission + if (!shouldDisplayMenu(menu)) return null + + // Handle item and group types switch (menu.type) { case 'collapse': - return + return case 'item': - return + return default: return ( @@ -27,7 +33,40 @@ const NavGroup = ({ item }) => { ) } - }) + } + + const shouldDisplayMenu = (menu) => { + // Handle permission check + if (menu.permission && !hasPermission(menu.permission)) { + return false // Do not render if permission is lacking + } + + // If `display` is defined, check against cloud/enterprise conditions + if (menu.display) { + const shouldDisplay = hasDisplay(menu.display) + return shouldDisplay + } + + // If `display` is not defined, display by default + return true + } + + const renderPrimaryItems = () => { + const primaryGroup = item.children.find((child) => child.id === 'primary') + return primaryGroup.children + } + + const renderNonPrimaryGroups = () => { + let nonprimaryGroups = item.children.filter((child) => child.id !== 'primary') + // Display children based on permission and display + nonprimaryGroups = nonprimaryGroups.map((group) => { + const children = group.children.filter((menu) => shouldDisplayMenu(menu)) + return { ...group, children } + }) + // Get rid of groups with empty children + nonprimaryGroups = nonprimaryGroups.filter((group) => group.children.length > 0) + return nonprimaryGroups + } return ( <> @@ -44,13 +83,31 @@ const NavGroup = ({ item }) => { ) } - sx={{ py: '20px' }} + sx={{ p: '16px', py: 2, display: 'flex', flexDirection: 'column', gap: 1 }} > - {items} + {renderPrimaryItems().map((menu) => listItems(menu))} - {/* group divider */} - + {renderNonPrimaryGroups().map((group) => { + const groupPermissions = group.children.map((menu) => menu.permission).join(',') + return ( + + <> + + + {group.title} + + } + sx={{ p: '16px', py: 2, display: 'flex', flexDirection: 'column', gap: 1 }} + > + {group.children.map((menu) => listItems(menu))} + + + + ) + })} ) } diff --git a/packages/ui/src/layout/MainLayout/Sidebar/MenuList/NavItem/index.jsx b/packages/ui/src/layout/MainLayout/Sidebar/MenuList/NavItem/index.jsx index 91a4fcf733d..10445554bd0 100644 --- a/packages/ui/src/layout/MainLayout/Sidebar/MenuList/NavItem/index.jsx +++ b/packages/ui/src/layout/MainLayout/Sidebar/MenuList/NavItem/index.jsx @@ -101,7 +101,6 @@ const NavItem = ({ item, level, navType, onClick, onUploadFile }) => { disabled={item.disabled} sx={{ borderRadius: `${customization.borderRadius}px`, - mb: 0.5, alignItems: 'flex-start', backgroundColor: level > 1 ? 'transparent !important' : 'inherit', py: level > 1 ?
1 : 1.25, diff --git a/packages/ui/src/layout/MainLayout/Sidebar/MenuList/index.jsx b/packages/ui/src/layout/MainLayout/Sidebar/MenuList/index.jsx index 1473d8a81bb..6dfb1a3c964 100644 --- a/packages/ui/src/layout/MainLayout/Sidebar/MenuList/index.jsx +++ b/packages/ui/src/layout/MainLayout/Sidebar/MenuList/index.jsx @@ -1,14 +1,14 @@ // material-ui -import { Typography } from '@mui/material' +import { Box, Typography } from '@mui/material' // project imports import NavGroup from './NavGroup' -import menuItem from '@/menu-items' +import { menuItems } from '@/menu-items' // ==============================|| SIDEBAR MENU LIST ||============================== // const MenuList = () => { - const navItems = menuItem.items.map((item) => { + const navItems = menuItems.items.map((item) => { switch (item.type) { case 'group': return @@ -21,7 +21,7 @@ const MenuList = () => { } }) - return <>{navItems} + return {navItems} } export default MenuList diff --git a/packages/ui/src/layout/MainLayout/Sidebar/TrialInfo.jsx b/packages/ui/src/layout/MainLayout/Sidebar/TrialInfo.jsx new file mode 100644 index 00000000000..0e63ce78677 --- /dev/null +++ b/packages/ui/src/layout/MainLayout/Sidebar/TrialInfo.jsx @@ -0,0 +1,58 @@ +import { Box, Skeleton, Typography } from '@mui/material' +import { useTheme } from '@mui/material/styles' +import PropTypes from 'prop-types' +import { StyledButton } from '@/ui-component/button/StyledButton' + +const TrialInfo = ({ billingPortalUrl, isLoading, paymentMethodExists, trialDaysLeft }) => { + const theme = useTheme() + + return ( + + {isLoading ? ( + + + + + ) : ( + <> + + There are{' '} + + {trialDaysLeft} days left + {' '} + in your trial. {!paymentMethodExists ? 'Update your payment method to avoid service interruption.' : ''} + + {!paymentMethodExists && ( + + + Update Payment Method + + + )} + + )} + + ) +} + +TrialInfo.propTypes = { + billingPortalUrl: PropTypes.string, + isLoading: PropTypes.bool, + paymentMethodExists: PropTypes.bool, + trialDaysLeft: PropTypes.number +} + +export default TrialInfo diff --git a/packages/ui/src/layout/MainLayout/Sidebar/index.jsx b/packages/ui/src/layout/MainLayout/Sidebar/index.jsx index 5d1908c8b28..b05d6669ee8 100644 --- a/packages/ui/src/layout/MainLayout/Sidebar/index.jsx +++ b/packages/ui/src/layout/MainLayout/Sidebar/index.jsx @@ -1,4 +1,5 @@ import PropTypes from 'prop-types' +import { useSelector } from 'react-redux' // material-ui import { useTheme } from '@mui/material/styles' @@ -11,6 +12,9 @@ import { BrowserView, MobileView } from 'react-device-detect' // project imports import MenuList from './MenuList' import LogoSection from '../LogoSection' +import CloudMenuList from '@/layout/MainLayout/Sidebar/CloudMenuList' + +// store import { drawerWidth, headerHeight } from '@/store/constant' // ==============================|| SIDEBAR DRAWER ||============================== // @@ -18,6 +22,7 @@ import { drawerWidth, headerHeight } from '@/store/constant' const Sidebar = ({ drawerOpen, drawerToggle, window }) => { const theme = useTheme() const matchUpMd = useMediaQuery(theme.breakpoints.up('md')) + const isAuthenticated = useSelector((state) => state.auth.isAuthenticated) const drawer = ( <> @@ -36,16 +41,18 @@ const Sidebar = ({ drawerOpen, drawerToggle, window }) => { component='div' style={{ height: !matchUpMd ? 
'calc(100vh - 56px)' : `calc(100vh - ${headerHeight}px)`, - paddingLeft: '16px', - paddingRight: '16px' + display: 'flex', + flexDirection: 'column' }} > + + @@ -62,30 +69,31 @@ const Sidebar = ({ drawerOpen, drawerToggle, window }) => { }} aria-label='mailbox folders' > - - {drawer} - + {isAuthenticated && ( + + {drawer} + + )} ) } diff --git a/packages/ui/src/layout/MainLayout/index.jsx b/packages/ui/src/layout/MainLayout/index.jsx index 236d27c97be..cab73c15a83 100644 --- a/packages/ui/src/layout/MainLayout/index.jsx +++ b/packages/ui/src/layout/MainLayout/index.jsx @@ -86,7 +86,7 @@ const MainLayout = () => { transition: leftDrawerOpened ? theme.transitions.create('width') : 'none' }} > - +
diff --git a/packages/ui/src/menu-items/agentsettings.js b/packages/ui/src/menu-items/agentsettings.js index 741ce032216..7e7eb8dae48 100644 --- a/packages/ui/src/menu-items/agentsettings.js +++ b/packages/ui/src/menu-items/agentsettings.js @@ -50,42 +50,48 @@ const agent_settings = { title: 'Configuration', type: 'item', url: '', - icon: icons.IconAdjustmentsHorizontal + icon: icons.IconAdjustmentsHorizontal, + permission: 'agentflows:config' }, { id: 'saveAsTemplate', title: 'Save As Template', type: 'item', url: '', - icon: icons.IconTemplate + icon: icons.IconTemplate, + permission: 'templates:flowexport' }, { id: 'duplicateChatflow', title: 'Duplicate Agents', type: 'item', url: '', - icon: icons.IconCopy + icon: icons.IconCopy, + permission: 'agentflows:duplicate' }, { id: 'loadChatflow', title: 'Load Agents', type: 'item', url: '', - icon: icons.IconFileUpload + icon: icons.IconFileUpload, + permission: 'agentflows:import' }, { id: 'exportChatflow', title: 'Export Agents', type: 'item', url: '', - icon: icons.IconFileExport + icon: icons.IconFileExport, + permission: 'agentflows:export' }, { id: 'deleteChatflow', title: 'Delete Agents', type: 'item', url: '', - icon: icons.IconTrash + icon: icons.IconTrash, + permission: 'agentflows:delete' } ] } diff --git a/packages/ui/src/menu-items/customassistant.js b/packages/ui/src/menu-items/customassistant.js index 328b4adfcf8..dba00fbd4ba 100644 --- a/packages/ui/src/menu-items/customassistant.js +++ b/packages/ui/src/menu-items/customassistant.js @@ -35,14 +35,16 @@ const customAssistantSettings = { title: 'Configuration', type: 'item', url: '', - icon: icons.IconAdjustmentsHorizontal + icon: icons.IconAdjustmentsHorizontal, + permission: 'assistants:update' }, { id: 'deleteAssistant', title: 'Delete Assistant', type: 'item', url: '', - icon: icons.IconTrash + icon: icons.IconTrash, + permission: 'assistants:delete' } ] } diff --git a/packages/ui/src/menu-items/dashboard.js b/packages/ui/src/menu-items/dashboard.js index c9af49863f6..a320d0ca3f9 100644 --- a/packages/ui/src/menu-items/dashboard.js +++ b/packages/ui/src/menu-items/dashboard.js @@ -1,5 +1,6 @@ // assets import { + IconList, IconUsersGroup, IconHierarchy, IconBuildingStore, @@ -7,23 +8,50 @@ import { IconTool, IconLock, IconRobot, + IconSettings, IconVariable, IconFiles, + IconTestPipe, + IconMicroscope, + IconDatabase, + IconChartHistogram, + IconUserEdit, + IconFileUpload, + IconClipboardList, + IconStack2, + IconUsers, + IconLockCheck, + IconFileDatabase, + IconShieldLock, IconListCheck } from '@tabler/icons-react' // constant const icons = { - IconListCheck, - IconUsersGroup, IconHierarchy, + IconUsersGroup, IconBuildingStore, + IconList, IconKey, IconTool, IconLock, IconRobot, + IconSettings, IconVariable, - IconFiles + IconFiles, + IconTestPipe, + IconMicroscope, + IconDatabase, + IconUserEdit, + IconChartHistogram, + IconFileUpload, + IconClipboardList, + IconStack2, + IconUsers, + IconLockCheck, + IconFileDatabase, + IconShieldLock, + IconListCheck } // ==============================|| DASHBOARD MENU ITEMS ||============================== // @@ -34,84 +62,230 @@ const dashboard = { type: 'group', children: [ { - id: 'chatflows', - title: 'Chatflows', - type: 'item', - url: '/chatflows', - icon: icons.IconHierarchy, - breadcrumbs: true - }, - { - id: 'agentflows', - title: 'Agentflows', - type: 'item', - url: '/agentflows', - icon: icons.IconUsersGroup, - breadcrumbs: true - }, - { - id: 'executions', - title: 'Executions', - type: 'item', - url: '/executions', - 
icon: icons.IconListCheck, - breadcrumbs: true - }, - { - id: 'assistants', - title: 'Assistants', - type: 'item', - url: '/assistants', - icon: icons.IconRobot, - breadcrumbs: true - }, - { - id: 'marketplaces', - title: 'Marketplaces', - type: 'item', - url: '/marketplaces', - icon: icons.IconBuildingStore, - breadcrumbs: true - }, - { - id: 'tools', - title: 'Tools', - type: 'item', - url: '/tools', - icon: icons.IconTool, - breadcrumbs: true - }, - { - id: 'credentials', - title: 'Credentials', - type: 'item', - url: '/credentials', - icon: icons.IconLock, - breadcrumbs: true + id: 'primary', + title: '', + type: 'group', + children: [ + { + id: 'chatflows', + title: 'Chatflows', + type: 'item', + url: '/chatflows', + icon: icons.IconHierarchy, + breadcrumbs: true, + permission: 'chatflows:view' + }, + { + id: 'agentflows', + title: 'Agentflows', + type: 'item', + url: '/agentflows', + icon: icons.IconUsersGroup, + breadcrumbs: true, + permission: 'agentflows:view' + }, + { + id: 'executions', + title: 'Executions', + type: 'item', + url: '/executions', + icon: icons.IconListCheck, + breadcrumbs: true, + permission: 'executions:view' + }, + { + id: 'assistants', + title: 'Assistants', + type: 'item', + url: '/assistants', + icon: icons.IconRobot, + breadcrumbs: true, + permission: 'assistants:view' + }, + { + id: 'marketplaces', + title: 'Marketplaces', + type: 'item', + url: '/marketplaces', + icon: icons.IconBuildingStore, + breadcrumbs: true, + permission: 'templates:marketplace,templates:custom' + }, + { + id: 'tools', + title: 'Tools', + type: 'item', + url: '/tools', + icon: icons.IconTool, + breadcrumbs: true, + permission: 'tools:view' + }, + { + id: 'credentials', + title: 'Credentials', + type: 'item', + url: '/credentials', + icon: icons.IconLock, + breadcrumbs: true, + permission: 'credentials:view' + }, + { + id: 'variables', + title: 'Variables', + type: 'item', + url: '/variables', + icon: icons.IconVariable, + breadcrumbs: true, + permission: 'variables:view' + }, + { + id: 'apikey', + title: 'API Keys', + type: 'item', + url: '/apikey', + icon: icons.IconKey, + breadcrumbs: true, + permission: 'apikeys:view' + }, + { + id: 'document-stores', + title: 'Document Stores', + type: 'item', + url: '/document-stores', + icon: icons.IconFiles, + breadcrumbs: true, + permission: 'documentStores:view' + } + ] }, { - id: 'variables', - title: 'Variables', - type: 'item', - url: '/variables', - icon: icons.IconVariable, - breadcrumbs: true + id: 'evaluations', + title: 'Evaluations', + type: 'group', + children: [ + { + id: 'datasets', + title: 'Datasets', + type: 'item', + url: '/datasets', + icon: icons.IconDatabase, + breadcrumbs: true, + display: 'feat:datasets', + permission: 'datasets:view' + }, + { + id: 'evaluators', + title: 'Evaluators', + type: 'item', + url: '/evaluators', + icon: icons.IconTestPipe, + breadcrumbs: true, + display: 'feat:evaluators', + permission: 'evaluators:view' + }, + { + id: 'evaluations', + title: 'Evaluations', + type: 'item', + url: '/evaluations', + icon: icons.IconChartHistogram, + breadcrumbs: true, + display: 'feat:evaluations', + permission: 'evaluations:view' + } + ] }, { - id: 'apikey', - title: 'API Keys', - type: 'item', - url: '/apikey', - icon: icons.IconKey, - breadcrumbs: true + id: 'management', + title: 'User & Workspace Management', + type: 'group', + children: [ + { + id: 'sso', + title: 'SSO Config', + type: 'item', + url: '/sso-config', + icon: icons.IconShieldLock, + breadcrumbs: true, + display: 'feat:sso-config', + 
permission: 'sso:manage' + }, + { + id: 'roles', + title: 'Roles', + type: 'item', + url: '/roles', + icon: icons.IconLockCheck, + breadcrumbs: true, + display: 'feat:roles', + permission: 'roles:manage' + }, + { + id: 'users', + title: 'Users', + type: 'item', + url: '/users', + icon: icons.IconUsers, + breadcrumbs: true, + display: 'feat:users', + permission: 'users:manage' + }, + { + id: 'workspaces', + title: 'Workspaces', + type: 'item', + url: '/workspaces', + icon: icons.IconStack2, + breadcrumbs: true, + display: 'feat:workspaces', + permission: 'workspace:view' + }, + { + id: 'login-activity', + title: 'Login Activity', + type: 'item', + url: '/login-activity', + icon: icons.IconClipboardList, + breadcrumbs: true, + display: 'feat:login-activity', + permission: 'loginActivity:view' + } + ] }, { - id: 'document-stores', - title: 'Document Stores', - type: 'item', - url: '/document-stores', - icon: icons.IconFiles, - breadcrumbs: true + id: 'others', + title: 'Others', + type: 'group', + children: [ + { + id: 'logs', + title: 'Logs', + type: 'item', + url: '/logs', + icon: icons.IconList, + breadcrumbs: true, + display: 'feat:logs', + permission: 'logs:view' + }, + // { + // id: 'files', + // title: 'Files', + // type: 'item', + // url: '/files', + // icon: icons.IconFileDatabase, + // breadcrumbs: true, + // display: 'feat:files', + // }, + { + id: 'account', + title: 'Account Settings', + type: 'item', + url: '/account', + icon: icons.IconSettings, + breadcrumbs: true, + display: 'feat:account' + } + ] } ] } diff --git a/packages/ui/src/menu-items/index.js b/packages/ui/src/menu-items/index.js index bad835afbb6..533f2c6cf29 100644 --- a/packages/ui/src/menu-items/index.js +++ b/packages/ui/src/menu-items/index.js @@ -2,8 +2,6 @@ import dashboard from './dashboard' // ==============================|| MENU ITEMS ||============================== // -const menuItems = { +export const menuItems = { items: [dashboard] } - -export default menuItems diff --git a/packages/ui/src/menu-items/settings.js b/packages/ui/src/menu-items/settings.js index 94ff397c373..01b4f537823 100644 --- a/packages/ui/src/menu-items/settings.js +++ b/packages/ui/src/menu-items/settings.js @@ -57,6 +57,7 @@ const settings = { title: 'Configuration', type: 'item', url: '', + permission: 'chatflows:config', icon: icons.IconAdjustmentsHorizontal }, { @@ -64,35 +65,40 @@ const settings = { title: 'Save As Template', type: 'item', url: '', - icon: icons.IconTemplate + icon: icons.IconTemplate, + permission: 'templates:flowexport' }, { id: 'duplicateChatflow', title: 'Duplicate Chatflow', type: 'item', url: '', - icon: icons.IconCopy + icon: icons.IconCopy, + permission: 'chatflows:duplicate' }, { id: 'loadChatflow', title: 'Load Chatflow', type: 'item', url: '', - icon: icons.IconFileUpload + icon: icons.IconFileUpload, + permission: 'chatflows:import' }, { id: 'exportChatflow', title: 'Export Chatflow', type: 'item', url: '', - icon: icons.IconFileExport + icon: icons.IconFileExport, + permission: 'chatflows:export' }, { id: 'deleteChatflow', title: 'Delete Chatflow', type: 'item', url: '', - icon: icons.IconTrash + icon: icons.IconTrash, + permission: 'chatflows:delete' } ] } diff --git a/packages/ui/src/routes/AuthRoutes.jsx b/packages/ui/src/routes/AuthRoutes.jsx new file mode 100644 index 00000000000..2d63fc38719 --- /dev/null +++ b/packages/ui/src/routes/AuthRoutes.jsx @@ -0,0 +1,59 @@ +import { lazy } from 'react' + +import Loadable from '@/ui-component/loading/Loadable' +import AuthLayout from 
'@/layout/AuthLayout' + +const ResolveLoginPage = Loadable(lazy(() => import('@/views/auth/login'))) +const SignInPage = Loadable(lazy(() => import('@/views/auth/signIn'))) +const RegisterPage = Loadable(lazy(() => import('@/views/auth/register'))) +const VerifyEmailPage = Loadable(lazy(() => import('@/views/auth/verify-email'))) +const ForgotPasswordPage = Loadable(lazy(() => import('@/views/auth/forgotPassword'))) +const ResetPasswordPage = Loadable(lazy(() => import('@/views/auth/resetPassword'))) +const UnauthorizedPage = Loadable(lazy(() => import('@/views/auth/unauthorized'))) +const OrganizationSetupPage = Loadable(lazy(() => import('@/views/organization/index'))) +const LicenseExpiredPage = Loadable(lazy(() => import('@/views/auth/expired'))) + +const AuthRoutes = { + path: '/', + element: , + children: [ + { + path: '/login', + element: + }, + { + path: '/signin', + element: + }, + { + path: '/register', + element: + }, + { + path: '/verify', + element: + }, + { + path: '/forgot-password', + element: + }, + { + path: '/reset-password', + element: + }, + { + path: '/unauthorized', + element: + }, + { + path: '/organization-setup', + element: + }, + { + path: '/license-expired', + element: + } + ] +} + +export default AuthRoutes diff --git a/packages/ui/src/routes/CanvasRoutes.jsx b/packages/ui/src/routes/CanvasRoutes.jsx index ead159205ab..e90a962496a 100644 --- a/packages/ui/src/routes/CanvasRoutes.jsx +++ b/packages/ui/src/routes/CanvasRoutes.jsx @@ -3,6 +3,7 @@ import { lazy } from 'react' // project imports import Loadable from '@/ui-component/loading/Loadable' import MinimalLayout from '@/layout/MinimalLayout' +import { RequireAuth } from '@/routes/RequireAuth' // canvas routing const Canvas = Loadable(lazy(() => import('@/views/canvas'))) @@ -18,35 +19,67 @@ const CanvasRoutes = { children: [ { path: '/canvas', - element: + element: ( + + + + ) }, { path: '/canvas/:id', - element: + element: ( + + + + ) }, { path: '/agentcanvas', - element: + element: ( + + + + ) }, { path: '/agentcanvas/:id', - element: + element: ( + + + + ) }, { path: '/v2/agentcanvas', - element: + element: ( + + + + ) }, { path: '/v2/agentcanvas/:id', - element: + element: ( + + + + ) }, { path: '/marketplace/:id', - element: + element: ( + + + + ) }, { path: '/v2/marketplace/:id', - element: + element: ( + + + + ) } ] } diff --git a/packages/ui/src/routes/MainRoutes.jsx b/packages/ui/src/routes/MainRoutes.jsx index f50873de3b5..ce7caa0423d 100644 --- a/packages/ui/src/routes/MainRoutes.jsx +++ b/packages/ui/src/routes/MainRoutes.jsx @@ -4,6 +4,8 @@ import { lazy } from 'react' import MainLayout from '@/layout/MainLayout' import Loadable from '@/ui-component/loading/Loadable' +import { RequireAuth } from '@/routes/RequireAuth' + // chatflows routing const Chatflows = Loadable(lazy(() => import('@/views/chatflows'))) @@ -39,9 +41,35 @@ const LoaderConfigPreviewChunks = Loadable(lazy(() => import('@/views/docstore/L const VectorStoreConfigure = Loadable(lazy(() => import('@/views/docstore/VectorStoreConfigure'))) const VectorStoreQuery = Loadable(lazy(() => import('@/views/docstore/VectorStoreQuery'))) -// execution routing +// Evaluations routing +const EvalEvaluation = Loadable(lazy(() => import('@/views/evaluations/index'))) +const EvaluationResult = Loadable(lazy(() => import('@/views/evaluations/EvaluationResult'))) +const EvalDatasetRows = Loadable(lazy(() => import('@/views/datasets/DatasetItems'))) +const EvalDatasets = Loadable(lazy(() => import('@/views/datasets'))) +const Evaluators = 
Loadable(lazy(() => import('@/views/evaluators'))) + +// account routing +const Account = Loadable(lazy(() => import('@/views/account'))) +const UserProfile = Loadable(lazy(() => import('@/views/account/UserProfile'))) + +// files routing +const Files = Loadable(lazy(() => import('@/views/files'))) + +// logs routing +const Logs = Loadable(lazy(() => import('@/views/serverlogs'))) + +// executions routing const Executions = Loadable(lazy(() => import('@/views/agentexecutions'))) +// enterprise features +const UsersPage = Loadable(lazy(() => import('@/views/users'))) +const RolesPage = Loadable(lazy(() => import('@/views/roles'))) +const LoginActivityPage = Loadable(lazy(() => import('@/views/auth/loginActivity'))) +const Workspaces = Loadable(lazy(() => import('@/views/workspace'))) +const WorkspaceDetails = Loadable(lazy(() => import('@/views/workspace/WorkspaceUsers'))) +const SSOConfig = Loadable(lazy(() => import('@/views/auth/ssoConfig'))) +const SSOSuccess = Loadable(lazy(() => import('@/views/auth/ssoSuccess'))) + // ==============================|| MAIN ROUTING ||============================== // const MainRoutes = { @@ -50,83 +78,283 @@ const MainRoutes = { children: [ { path: '/', - element: + element: ( + + + + ) }, { path: '/chatflows', - element: + element: ( + + + + ) }, { path: '/agentflows', - element: + element: ( + + + + ) }, { path: '/executions', - element: + element: ( + + + + ) }, { path: '/marketplaces', - element: + element: ( + + + + ) }, { path: '/apikey', - element: + element: ( + + + + ) }, { path: '/tools', - element: + element: ( + + + + ) }, { path: '/assistants', - element: + element: ( + + + + ) }, { path: '/assistants/custom', - element: + element: ( + + + + ) }, { path: '/assistants/custom/:id', - element: + element: ( + + + + ) }, { path: '/assistants/openai', - element: + element: ( + + + + ) }, { path: '/credentials', - element: + element: ( + + + + ) }, { path: '/variables', - element: + element: ( + + + + ) }, { path: '/document-stores', - element: + element: ( + + + + ) }, { path: '/document-stores/:storeId', - element: + element: ( + + + + ) }, { path: '/document-stores/chunks/:storeId/:fileId', - element: + element: ( + + + + ) }, { path: '/document-stores/:storeId/:name', - element: + element: ( + + + + ) }, { path: '/document-stores/vector/:storeId', - element: + element: ( + + + + ) }, { path: '/document-stores/vector/:storeId/:docId', - element: + element: ( + + + + ) }, { path: '/document-stores/query/:storeId', - element: + element: ( + + + + ) + }, + { + path: '/datasets', + element: ( + + + + ) + }, + { + path: '/dataset_rows/:id', + element: ( + + + + ) + }, + { + path: '/evaluations', + element: ( + + + + ) + }, + { + path: '/evaluation_results/:id', + element: ( + + + + ) + }, + { + path: '/evaluators', + element: ( + + + + ) + }, + { + path: '/logs', + element: ( + + + + ) + }, + { + path: '/files', + element: ( + + + + ) + }, + { + path: '/account', + element: ( + + + + ) + }, + { + path: '/users', + element: ( + + + + ) + }, + { + path: '/user-profile', + element: + }, + { + path: '/roles', + element: ( + + + + ) + }, + { + path: '/login-activity', + element: ( + + + + ) + }, + { + path: '/workspaces', + element: ( + + + + ) + }, + { + path: '/workspace-users/:id', + element: ( + + + + ) + }, + { + path: '/sso-config', + element: ( + + + + ) + }, + { + path: '/sso-success', + element: } ] } diff --git a/packages/ui/src/routes/RequireAuth.jsx b/packages/ui/src/routes/RequireAuth.jsx new file mode 100644 index 00000000000..d9694d67408 
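// Illustrative sketch of the RequireAuth wrapping pattern used by the route
// definitions above (the wrapped JSX is elided in this diff). Prop names come
// from RequireAuth's propTypes; the exact permission/display strings per route
// are assumptions taken from the matching menu entries in dashboard.js.
import { lazy } from 'react'
import Loadable from '@/ui-component/loading/Loadable'
import { RequireAuth } from '@/routes/RequireAuth'

const Chatflows = Loadable(lazy(() => import('@/views/chatflows')))
const EvalDatasets = Loadable(lazy(() => import('@/views/datasets')))

const exampleRoutes = [
    {
        // plain permission check
        path: '/chatflows',
        element: (
            <RequireAuth permission='chatflows:view'>
                <Chatflows />
            </RequireAuth>
        )
    },
    {
        // permission check plus a feature flag gate
        path: '/datasets',
        element: (
            <RequireAuth permission='datasets:view' display='feat:datasets'>
                <EvalDatasets />
            </RequireAuth>
        )
    }
]

export default exampleRoutes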
--- /dev/null +++ b/packages/ui/src/routes/RequireAuth.jsx @@ -0,0 +1,82 @@ +import { Navigate } from 'react-router' +import PropTypes from 'prop-types' +import { useLocation } from 'react-router-dom' +import { useConfig } from '@/store/context/ConfigContext' +import { useAuth } from '@/hooks/useAuth' +import { useSelector } from 'react-redux' + +/** + * Checks if a feature flag is enabled + * @param {Object} features - Feature flags object + * @param {string} display - Feature flag key to check + * @param {React.ReactElement} children - Components to render if feature is enabled + * @returns {React.ReactElement} Children or unauthorized redirect + */ +const checkFeatureFlag = (features, display, children) => { + // Validate features object exists and is properly formatted + if (!features || Array.isArray(features) || Object.keys(features).length === 0) { + return + } + + // Check if feature flag exists and is enabled + if (Object.hasOwnProperty.call(features, display)) { + const isFeatureEnabled = features[display] === 'true' || features[display] === true + return isFeatureEnabled ? children : + } + + return +} + +export const RequireAuth = ({ permission, display, children }) => { + const location = useLocation() + const { isCloud, isOpenSource, isEnterpriseLicensed } = useConfig() + const { hasPermission } = useAuth() + const isGlobal = useSelector((state) => state.auth.isGlobal) + const currentUser = useSelector((state) => state.auth.user) + const features = useSelector((state) => state.auth.features) + const permissions = useSelector((state) => state.auth.permissions) + + // Step 1: Authentication Check + // Redirect to login if user is not authenticated + if (!currentUser) { + return + } + + // Step 2: Deployment Type Specific Logic + // Open Source: Only show features without display property + if (isOpenSource) { + return !display ? 
children : + } + + // Cloud & Enterprise: Check both permissions and feature flags + if (isCloud || isEnterpriseLicensed) { + // Allow access to basic features (no display property) + if (!display) return children + + // Check if user has any permissions + if (permissions.length === 0) { + return + } + + // Organization admins bypass permission checks + if (isGlobal) { + return checkFeatureFlag(features, display, children) + } + + // Check user permissions and feature flags + if (!permission || hasPermission(permission)) { + return checkFeatureFlag(features, display, children) + } + + return + } + + // Fallback: Allow access if none of the above conditions match + return children +} + +RequireAuth.propTypes = { + permission: PropTypes.string, + display: PropTypes.string, + children: PropTypes.element +} diff --git a/packages/ui/src/routes/index.jsx b/packages/ui/src/routes/index.jsx index 3d40f2f9ad9..7a5d76f1ba3 100644 --- a/packages/ui/src/routes/index.jsx +++ b/packages/ui/src/routes/index.jsx @@ -4,11 +4,12 @@ import { useRoutes } from 'react-router-dom' import MainRoutes from './MainRoutes' import CanvasRoutes from './CanvasRoutes' import ChatbotRoutes from './ChatbotRoutes' -import ExecutionRoutes from './ExecutionRoutes' import config from '@/config' +import AuthRoutes from '@/routes/AuthRoutes' +import ExecutionRoutes from './ExecutionRoutes' // ==============================|| ROUTING RENDER ||============================== // export default function ThemeRoutes() { - return useRoutes([MainRoutes, CanvasRoutes, ChatbotRoutes, ExecutionRoutes], config.basename) + return useRoutes([MainRoutes, AuthRoutes, CanvasRoutes, ChatbotRoutes, ExecutionRoutes], config.basename) } diff --git a/packages/ui/src/store/constant.js b/packages/ui/src/store/constant.js index 15a468955ae..627959fa1f2 100644 --- a/packages/ui/src/store/constant.js +++ b/packages/ui/src/store/constant.js @@ -26,6 +26,17 @@ export const baseURL = import.meta.env.VITE_API_BASE_URL || window.location.orig export const uiBaseURL = import.meta.env.VITE_UI_BASE_URL || window.location.origin export const FLOWISE_CREDENTIAL_ID = 'FLOWISE_CREDENTIAL_ID' export const REDACTED_CREDENTIAL_VALUE = '_FLOWISE_BLANK_07167752-1a71-43b1-bf8f-4f32252165db' +export const ErrorMessage = { + INVALID_MISSING_TOKEN: 'Invalid or Missing token', + TOKEN_EXPIRED: 'Token Expired', + REFRESH_TOKEN_EXPIRED: 'Refresh Token Expired', + FORBIDDEN: 'Forbidden', + UNKNOWN_USER: 'Unknown Username or Password', + INCORRECT_PASSWORD: 'Incorrect Password', + INACTIVE_USER: 'Inactive User', + INVALID_WORKSPACE: 'No Workspace Assigned', + UNKNOWN_ERROR: 'Unknown Error' +} export const AGENTFLOW_ICONS = [ { name: 'conditionAgentflow', diff --git a/packages/ui/src/store/context/ConfigContext.jsx b/packages/ui/src/store/context/ConfigContext.jsx new file mode 100644 index 00000000000..a11ef281725 --- /dev/null +++ b/packages/ui/src/store/context/ConfigContext.jsx @@ -0,0 +1,55 @@ +import platformsettingsApi from '@/api/platformsettings' +import PropTypes from 'prop-types' +import { createContext, useContext, useEffect, useState } from 'react' + +const ConfigContext = createContext() + +export const ConfigProvider = ({ children }) => { + const [config, setConfig] = useState({}) + const [loading, setLoading] = useState(true) + const [isEnterpriseLicensed, setEnterpriseLicensed] = useState(false) + const [isCloud, setCloudLicensed] = useState(false) + const [isOpenSource, setOpenSource] = useState(false) + + useEffect(() => { + const userSettings = 
platformsettingsApi.getSettings() + Promise.all([userSettings]) + .then(([currentSettingsData]) => { + const finalData = { + ...currentSettingsData.data + } + setConfig(finalData) + if (finalData.PLATFORM_TYPE) { + if (finalData.PLATFORM_TYPE === 'enterprise') { + setEnterpriseLicensed(true) + setCloudLicensed(false) + setOpenSource(false) + } else if (finalData.PLATFORM_TYPE === 'cloud') { + setCloudLicensed(true) + setEnterpriseLicensed(false) + setOpenSource(false) + } else { + setOpenSource(true) + setEnterpriseLicensed(false) + setCloudLicensed(false) + } + } + + setLoading(false) + }) + .catch((error) => { + console.error('Error fetching data:', error) + setLoading(false) + }) + }, []) + + return ( + {children} + ) +} + +export const useConfig = () => useContext(ConfigContext) + +ConfigProvider.propTypes = { + children: PropTypes.any +} diff --git a/packages/ui/src/store/context/ErrorContext.jsx b/packages/ui/src/store/context/ErrorContext.jsx new file mode 100644 index 00000000000..e41070a1516 --- /dev/null +++ b/packages/ui/src/store/context/ErrorContext.jsx @@ -0,0 +1,59 @@ +import { createContext, useContext, useState } from 'react' +import { redirectWhenUnauthorized } from '@/utils/genericHelper' +import PropTypes from 'prop-types' +import { useNavigate } from 'react-router-dom' +import { store } from '@/store' +import { logoutSuccess } from '@/store/reducers/authSlice' +import { ErrorMessage } from '../constant' + +const ErrorContext = createContext() + +export const ErrorProvider = ({ children }) => { + const [error, setError] = useState(null) + const navigate = useNavigate() + + const handleError = async (err) => { + console.error(err) + if (err?.response?.status === 403) { + navigate('/unauthorized') + } else if (err?.response?.status === 401) { + if (ErrorMessage.INVALID_MISSING_TOKEN === err?.response?.data?.message) { + store.dispatch(logoutSuccess()) + navigate('/login') + } else { + const isRedirect = err?.response?.data?.redirectTo && err?.response?.data?.error + + if (isRedirect) { + redirectWhenUnauthorized({ + error: err.response.data.error, + redirectTo: err.response.data.redirectTo + }) + } else { + const currentPath = window.location.pathname + if (currentPath !== '/signin' && currentPath !== '/login') { + store.dispatch(logoutSuccess()) + navigate('/login') + } + } + } + } else setError(err) + } + + return ( + + {children} + + ) +} + +export const useError = () => useContext(ErrorContext) + +ErrorProvider.propTypes = { + children: PropTypes.any +} diff --git a/packages/ui/src/store/reducer.jsx b/packages/ui/src/store/reducer.jsx index b464e57196c..a9bb1d8f1b6 100644 --- a/packages/ui/src/store/reducer.jsx +++ b/packages/ui/src/store/reducer.jsx @@ -5,6 +5,7 @@ import customizationReducer from './reducers/customizationReducer' import canvasReducer from './reducers/canvasReducer' import notifierReducer from './reducers/notifierReducer' import dialogReducer from './reducers/dialogReducer' +import authReducer from './reducers/authSlice' // ==============================|| COMBINE REDUCER ||============================== // @@ -12,7 +13,8 @@ const reducer = combineReducers({ customization: customizationReducer, canvas: canvasReducer, notifier: notifierReducer, - dialog: dialogReducer + dialog: dialogReducer, + auth: authReducer }) export default reducer diff --git a/packages/ui/src/store/reducers/authSlice.js b/packages/ui/src/store/reducers/authSlice.js new file mode 100644 index 00000000000..2136346899f --- /dev/null +++ 
b/packages/ui/src/store/reducers/authSlice.js @@ -0,0 +1,65 @@ +// authSlice.js +import { createSlice } from '@reduxjs/toolkit' +import AuthUtils from '@/utils/authUtils' + +const initialState = { + user: localStorage.getItem('user') ? JSON.parse(localStorage.getItem('user')) : null, + isAuthenticated: 'true' === localStorage.getItem('isAuthenticated'), + isGlobal: 'true' === localStorage.getItem('isGlobal'), + token: null, + permissions: + localStorage.getItem('permissions') && localStorage.getItem('permissions') !== 'undefined' + ? JSON.parse(localStorage.getItem('permissions')) + : null, + features: localStorage.getItem('features') ? JSON.parse(localStorage.getItem('features')) : null +} + +const authSlice = createSlice({ + name: 'auth', + initialState, + reducers: { + loginSuccess: (state, action) => { + AuthUtils.updateStateAndLocalStorage(state, action.payload) + }, + logoutSuccess: (state) => { + state.user = null + state.token = null + state.permissions = null + state.features = null + state.isAuthenticated = false + state.isGlobal = false + AuthUtils.removeCurrentUser() + }, + workspaceSwitchSuccess: (state, action) => { + AuthUtils.updateStateAndLocalStorage(state, action.payload) + }, + upgradePlanSuccess: (state, action) => { + AuthUtils.updateStateAndLocalStorage(state, action.payload) + }, + userProfileUpdated: (state, action) => { + const user = AuthUtils.extractUser(action.payload) + state.user.name = user.name + state.user.email = user.email + AuthUtils.updateCurrentUser(state.user) + }, + workspaceNameUpdated: (state, action) => { + const updatedWorkspace = action.payload + // find the matching assignedWorkspace and update it + const assignedWorkspaces = state.user.assignedWorkspaces.map((workspace) => { + if (workspace.id === updatedWorkspace.id) { + return { + ...workspace, + name: updatedWorkspace.name + } + } + return workspace + }) + state.user.assignedWorkspaces = assignedWorkspaces + AuthUtils.updateCurrentUser(state.user) + } + } +}) + +export const { loginSuccess, logoutSuccess, workspaceSwitchSuccess, upgradePlanSuccess, userProfileUpdated, workspaceNameUpdated } = + authSlice.actions +export default authSlice.reducer diff --git a/packages/ui/src/ui-component/button/FlowListMenu.jsx b/packages/ui/src/ui-component/button/FlowListMenu.jsx index 35b644da78f..e70bbd85276 100644 --- a/packages/ui/src/ui-component/button/FlowListMenu.jsx +++ b/packages/ui/src/ui-component/button/FlowListMenu.jsx @@ -4,7 +4,7 @@ import PropTypes from 'prop-types' import { styled, alpha } from '@mui/material/styles' import Menu from '@mui/material/Menu' -import MenuItem from '@mui/material/MenuItem' +import { PermissionMenuItem } from '@/ui-component/button/RBACButtons' import EditIcon from '@mui/icons-material/Edit' import Divider from '@mui/material/Divider' import FileCopyIcon from '@mui/icons-material/FileCopy' @@ -74,7 +74,7 @@ const StyledMenu = styled((props) => ( } })) -export default function FlowListMenu({ chatflow, isAgentCanvas, setError, updateFlowsApi }) { +export default function FlowListMenu({ chatflow, isAgentCanvas, isAgentflowV2, setError, updateFlowsApi }) { const { confirm } = useConfirm() const dispatch = useDispatch() const updateChatflowApi = useApi(chatflowsApi.updateChatflow) @@ -269,7 +269,13 @@ export default function FlowListMenu({ chatflow, isAgentCanvas, setError, update setAnchorEl(null) try { localStorage.setItem('duplicatedFlowData', chatflow.flowData) - window.open(`${uiBaseURL}/${isAgentCanvas ? 
'agentcanvas' : 'canvas'}`, '_blank') + if (isAgentflowV2) { + window.open(`${uiBaseURL}/v2/agentcanvas`, '_blank') + } else if (isAgentCanvas) { + window.open(`${uiBaseURL}/agentcanvas`, '_blank') + } else { + window.open(`${uiBaseURL}/canvas`, '_blank') + } } catch (e) { console.error(e) } @@ -317,48 +323,84 @@ export default function FlowListMenu({ chatflow, isAgentCanvas, setError, update open={open} onClose={handleClose} > - + Rename - - + + Duplicate - - + + Export - - + + Save As Template - + - + Starter Prompts - - + + Chat Feedback - - + + Allowed Domains - - + + Speech To Text - - + + Update Category - + - + Delete - + { + const { hasPermission, hasDisplay } = useAuth() + + if (!hasPermission(permissionId) || !hasDisplay(display)) { + return null + } + + return +} + +export const StyledPermissionToggleButton = ({ permissionId, display, ...props }) => { + const { hasPermission, hasDisplay } = useAuth() + + if (!hasPermission(permissionId) || !hasDisplay(display)) { + return null + } + + return +} + +export const PermissionIconButton = ({ permissionId, display, ...props }) => { + const { hasPermission, hasDisplay } = useAuth() + + if (!hasPermission(permissionId) || !hasDisplay(display)) { + return null + } + + return +} + +export const PermissionButton = ({ permissionId, display, ...props }) => { + const { hasPermission, hasDisplay } = useAuth() + + if (!hasPermission(permissionId) || !hasDisplay(display)) { + return null + } + + return + ) + } + }) + return + } + } + setIsSaving(true) + try { + const responses = await Promise.all( + selectedUsers.map(async (item) => { + const saveObj = item.isNewUser + ? { + user: { + email: item.email, + createdBy: currentUser.id + }, + workspace: { + id: selectedWorkspace.id + }, + role: { + id: selectedRole.id + } + } + : { + user: { + email: item.user.email, + createdBy: currentUser.id + }, + workspace: { + id: selectedWorkspace.id + }, + role: { + id: selectedRole.id + } + } + + const response = await accountApi.inviteAccount(saveObj) + return response.data + }) + ) + if (responses.length > 0) { + enqueueSnackbar({ + message: 'Users invited to workspace', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + onConfirm() // Pass the first ID or modify as needed + } else { + throw new Error('No data received from the server') + } + } catch (error) { + console.error('Error in saveInvite:', error) + enqueueSnackbar({ + message: `Failed to invite users to workspace: ${error.response?.data?.message || error.message || 'Unknown error'}`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } finally { + setIsSaving(false) + } + } + + const validateEmail = (email) => { + return email.match( + /^(([^<>()[\]\\.,;:\s@"]+(\.[^<>()[\]\\.,;:\s@"]+)*)|(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/ + ) + } + + const handleChange = (event, newValue) => { + const updatedUsers = newValue + .filter((item) => { + if (item.isNewUser) { + // For new invites, validate the email + return validateEmail(item.email) + } + return true // Keep all existing users + }) + .map((item) => { + if (item.isNewUser) { + // This is a new invite + return { + email: item.email, + isNewUser: true, + alreadyInWorkspace: false + } + } else { + const existingUser = + userSearchResults.length > 0 + ? 
userSearchResults.find((result) => result.user.email === item.user.email) + : selectedUsers.find((result) => result.user.email === item.user.email) + return { + ...existingUser, + isNewUser: false, + alreadyInWorkspace: selectedWorkspace + ? existingUser && + existingUser.workspaceNames && + existingUser.workspaceNames.some((ws) => ws.id === selectedWorkspace.id) + : false + } + } + }) + + setSelectedUsers(updatedUsers) + + // If any invalid emails were filtered out, show a notification + if (updatedUsers.length < newValue.length) { + enqueueSnackbar({ + message: 'One or more invalid emails were removed.', + options: { + key: new Date().getTime() + Math.random(), + variant: 'warning', + action: (key) => ( + + ) + } + }) + } + } + + const handleInputChange = (event, newInputValue) => { + setSearchString(newInputValue) + const searchTerm = newInputValue.toLowerCase() + const filteredUsers = allUsers.filter( + (item) => item.user.name.toLowerCase().includes(searchTerm) || item.user.email.toLowerCase().includes(searchTerm) + ) + setUserSearchResults(filteredUsers) + setAllUsers((prevResults) => { + const newResults = [...prevResults] + filteredUsers.forEach((item) => { + if (!newResults.some((result) => result.user.id === item.user.id)) { + newResults.push(item) + } + }) + return newResults + }) + } + + const userSearchFilterOptions = (options, { inputValue }) => { + const filteredOptions = options.filter((option) => option !== null && option !== undefined) ?? [] + + // First filter out already selected users + const selectedUserEmails = selectedUsers.filter((user) => !user.isNewUser && user.user).map((user) => user.user.email) + + const unselectedOptions = filteredOptions.filter((option) => !option.user || !selectedUserEmails.includes(option.user.email)) + + const filterByNameOrEmail = unselectedOptions.filter( + (option) => + (option.user && option.user.name && option.user.name.toLowerCase().includes(inputValue.toLowerCase())) || + (option.user && option.user.email && option.user.email.toLowerCase().includes(inputValue.toLowerCase())) + ) + + // Early email detection regex + const partialEmailRegex = /^[^\s@]+@?[^\s@]*$/ + + if (filterByNameOrEmail.length === 0 && partialEmailRegex.test(inputValue)) { + // If it looks like an email (even partially), show the invite option + const inviteEmail = inputValue.includes('@') ? inputValue : `${inputValue}@` + // Check if this email is already in the selected users list + const isAlreadySelected = selectedUsers.some( + (user) => + (user.isNewUser && user.email === inviteEmail) || (!user.isNewUser && user.user && user.user.email === inviteEmail) + ) + + if (!isAlreadySelected) { + return [{ name: `Invite ${inviteEmail}`, email: inviteEmail, isNewUser: true }] + } + } + + if (filterByNameOrEmail.length === 0) { + return [{ name: 'No results found', email: '', isNoResult: true, disabled: true }] + } + + return filterByNameOrEmail + } + + const renderUserSearchInput = (params) => ( + 0 ? '' : 'Invite users by name or email'} /> + ) + + const renderUserSearchOptions = (props, option) => { + // Custom logic to determine if an option is selected, since state.selected seems unreliable + const isOptionSelected = option.isNewUser + ? selectedUsers.some((user) => user.isNewUser && user.email === option.email) + : selectedUsers.some((user) => !user.isNewUser && user.user && user.user.email === option.user?.email) + + return ( +
  • + {option.isNoResult ? ( + + No results found + + ) : option.isNewUser ? ( + + + {option.name} + + + ) : ( + + + {option.user.name} + {option.user.email} + + {isOptionSelected ? : null} + + )} +
  • + ) + } + + const renderSelectedUsersTags = (tagValue, getTagProps) => { + return selectedUsers.map((option, index) => { + const chipProps = getTagProps({ index }) + let chipType = option.isNewUser ? 'new' : 'existing' + if (option.alreadyInWorkspace) { + chipType = 'already-in-workspace' + } + const ChipComponent = option.isNewUser ? ( + + ) : ( + + ) + + const tooltipTitle = option.alreadyInWorkspace + ? `${option.user.name || option.user.email} is already a member of this workspace and won't be invited again.` + : option.isNewUser + ? 'An invitation will be sent to this email address' + : '' + + return tooltipTitle ? ( + + {ChipComponent} + + ) : ( + ChipComponent + ) + }) + } + + const handleWorkspaceChange = (event, newWorkspace) => { + setSelectedWorkspace(newWorkspace) + setSelectedUsers((prevUsers) => + prevUsers.map((user) => ({ + ...user, + alreadyInWorkspace: newWorkspace + ? user.workspaceNames && newWorkspace && user.workspaceNames.some((ws) => ws.id === newWorkspace.id) + : false + })) + ) + } + + const handleRoleChange = (event, newRole) => { + setSelectedRole(newRole) + } + + const getWorkspaceValue = () => { + if (dialogProps.data) { + return selectedWorkspace || {} + } + return selectedWorkspace || null + } + + const getRoleValue = () => { + if (dialogProps.data && dialogProps.type === 'ADD') { + return selectedRole || {} + } + return selectedRole || null + } + + const checkDisabled = () => { + if (isSaving || selectedUsers.length === 0 || !selectedWorkspace || !selectedRole) { + return true + } + return false + } + + const checkWorkspaceDisabled = () => { + if (dialogProps.data && dialogProps.type === 'ADD') { + return Boolean(selectedWorkspace) + } else if (dialogProps.data && dialogProps.type === 'EDIT') { + return dialogProps.disableWorkspaceSelection + } + return false + } + + const component = show ? ( + + +
    + + Invite Users +
    +
    + + + + Select Users * + + option.userId} + getOptionLabel={(option) => option.email || ''} + filterOptions={userSearchFilterOptions} + onChange={handleChange} + inputValue={searchString} + onInputChange={handleInputChange} + isOptionEqualToValue={(option, value) => { + // Compare based on user.email for existing users or email for new users + if (option.isNewUser && value.isNewUser) { + return option.email === value.email + } else if (!option.isNewUser && !value.isNewUser) { + return option.user?.email === value.user?.email + } + return false + }} + renderInput={renderUserSearchInput} + renderOption={renderUserSearchOptions} + renderTags={renderSelectedUsersTags} + sx={{ mt: 1 }} + value={selectedUsers} + PopperComponent={StyledPopper} + /> + + + + + Workspace * + + option.label || ''} + onChange={handleWorkspaceChange} + options={workspaces} + renderInput={(params) => } + sx={{ mt: 0.5 }} + value={getWorkspaceValue()} + PopperComponent={StyledPopper} + /> + + + + Role to Assign * + + option.label || ''} + onChange={handleRoleChange} + options={availableRoles} + renderInput={(params) => } + sx={{ mt: 0.5 }} + value={getRoleValue()} + PopperComponent={StyledPopper} + /> + + + + + + : null} + > + {dialogProps.confirmButtonName} + + + +
    + ) : null + + return createPortal(component, portalElement) +} + +InviteUsersDialog.propTypes = { + show: PropTypes.bool, + dialogProps: PropTypes.object, + onCancel: PropTypes.func, + onConfirm: PropTypes.func +} + +export default InviteUsersDialog diff --git a/packages/ui/src/ui-component/dialog/LoginDialog.jsx b/packages/ui/src/ui-component/dialog/LoginDialog.jsx deleted file mode 100644 index fe982b3b5ca..00000000000 --- a/packages/ui/src/ui-component/dialog/LoginDialog.jsx +++ /dev/null @@ -1,70 +0,0 @@ -import { createPortal } from 'react-dom' -import { useState } from 'react' -import PropTypes from 'prop-types' - -import { Dialog, DialogActions, DialogContent, Typography, DialogTitle } from '@mui/material' -import { StyledButton } from '@/ui-component/button/StyledButton' -import { Input } from '@/ui-component/input/Input' - -const LoginDialog = ({ show, dialogProps, onConfirm }) => { - const portalElement = document.getElementById('portal') - const usernameInput = { - label: 'Username', - name: 'username', - type: 'string', - placeholder: 'john doe' - } - const passwordInput = { - label: 'Password', - name: 'password', - type: 'password' - } - const [usernameVal, setUsernameVal] = useState('') - const [passwordVal, setPasswordVal] = useState('') - - const component = show ? ( - { - if (e.key === 'Enter') { - onConfirm(usernameVal, passwordVal) - } - }} - open={show} - fullWidth - maxWidth='xs' - aria-labelledby='alert-dialog-title' - aria-describedby='alert-dialog-description' - > - - {dialogProps.title} - - - Username - setUsernameVal(newValue)} - value={usernameVal} - showDialog={false} - /> -
    - Password - setPasswordVal(newValue)} value={passwordVal} /> -
    - - onConfirm(usernameVal, passwordVal)}> - {dialogProps.confirmButtonName} - - -
    - ) : null - - return createPortal(component, portalElement) -} - -LoginDialog.propTypes = { - show: PropTypes.bool, - dialogProps: PropTypes.object, - onConfirm: PropTypes.func -} - -export default LoginDialog diff --git a/packages/ui/src/ui-component/dialog/ShareWithWorkspaceDialog.jsx b/packages/ui/src/ui-component/dialog/ShareWithWorkspaceDialog.jsx new file mode 100644 index 00000000000..f3b3a655fe7 --- /dev/null +++ b/packages/ui/src/ui-component/dialog/ShareWithWorkspaceDialog.jsx @@ -0,0 +1,229 @@ +import { createPortal } from 'react-dom' +import PropTypes from 'prop-types' +import { useState, useEffect, useMemo } from 'react' +import { useDispatch, useSelector } from 'react-redux' +import { enqueueSnackbar as enqueueSnackbarAction, closeSnackbar as closeSnackbarAction } from '@/store/actions' +import { cloneDeep } from 'lodash' + +// Material +import { Button, Dialog, DialogActions, DialogContent, DialogTitle, Box, Stack, OutlinedInput, Typography } from '@mui/material' + +// Project imports +import { StyledButton } from '@/ui-component/button/StyledButton' +import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' +import { Grid } from '@/ui-component/grid/Grid' + +// Icons +import { IconX, IconShare } from '@tabler/icons-react' + +// API +import workspaceApi from '@/api/workspace' +import userApi from '@/api/user' + +// Hooks +import useApi from '@/hooks/useApi' + +// utils +import useNotifier from '@/utils/useNotifier' + +// const +import { HIDE_CANVAS_DIALOG, SHOW_CANVAS_DIALOG } from '@/store/actions' + +const ShareWithWorkspaceDialog = ({ show, dialogProps, onCancel, setError }) => { + const portalElement = document.getElementById('portal') + + const dispatch = useDispatch() + + // ==============================|| Snackbar ||============================== // + + useNotifier() + const getSharedWorkspacesForItemApi = useApi(workspaceApi.getSharedWorkspacesForItem) + const getWorkspacesByOrganizationIdUserIdApi = useApi(userApi.getWorkspacesByOrganizationIdUserId) + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const user = useSelector((state) => state.auth.user) + + const [outputSchema, setOutputSchema] = useState([]) + + const [name, setName] = useState('') + + const onRowUpdate = (newRow) => { + setTimeout(() => { + setOutputSchema((prevRows) => { + let allRows = [...cloneDeep(prevRows)] + const indexToUpdate = allRows.findIndex((row) => row.id === newRow.id) + if (indexToUpdate >= 0) { + allRows[indexToUpdate] = { ...newRow } + } + return allRows + }) + }) + } + + const columns = useMemo( + () => [ + { field: 'workspaceName', headerName: 'Workspace', editable: false, flex: 1 }, + { field: 'shared', headerName: 'Share', type: 'boolean', editable: true, width: 180 } + ], + [] + ) + + useEffect(() => { + if (getSharedWorkspacesForItemApi.data) { + const data = getSharedWorkspacesForItemApi.data + if (data && data.length > 0) { + outputSchema.map((row) => { + data.map((ws) => { + if (row.id === ws.workspaceId) { + row.shared = true + } + }) + }) + setOutputSchema([...outputSchema]) + } + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getSharedWorkspacesForItemApi.data]) + + useEffect(() => { + if (getSharedWorkspacesForItemApi.error && setError) { + setError(getSharedWorkspacesForItemApi.error) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getSharedWorkspacesForItemApi.error]) + + useEffect(() => { + if 
(getWorkspacesByOrganizationIdUserIdApi.data) { + const workspaces = [] + getWorkspacesByOrganizationIdUserIdApi.data + .filter((ws) => ws.workspace.id !== user.activeWorkspaceId) + .map((ws) => { + workspaces.push({ + id: ws.workspace.id, + workspaceName: ws.workspace.name, + shared: false + }) + }) + setOutputSchema([...workspaces]) + } + }, [getWorkspacesByOrganizationIdUserIdApi.data, user.activeWorkspaceId]) + + useEffect(() => { + if (getWorkspacesByOrganizationIdUserIdApi.error && setError) { + setError(getWorkspacesByOrganizationIdUserIdApi.error) + } + }, [getWorkspacesByOrganizationIdUserIdApi.error, setError]) + + useEffect(() => { + if (user) { + getWorkspacesByOrganizationIdUserIdApi.request(user.activeOrganizationId, user.id) + } + setName(dialogProps.data.name) + getSharedWorkspacesForItemApi.request(dialogProps.data.id) + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [dialogProps, user]) + + useEffect(() => { + if (show) dispatch({ type: SHOW_CANVAS_DIALOG }) + else dispatch({ type: HIDE_CANVAS_DIALOG }) + return () => dispatch({ type: HIDE_CANVAS_DIALOG }) + }, [show, dispatch]) + + const shareItemRequest = async () => { + try { + const obj = { + itemType: dialogProps.data.itemType, + workspaceIds: [] + } + outputSchema.map((row) => { + if (row.shared) { + obj.workspaceIds.push(row.id) + } + }) + const sharedResp = await workspaceApi.setSharedWorkspacesForItem(dialogProps.data.id, obj) + if (sharedResp.data) { + enqueueSnackbar({ + message: 'Items Shared Successfully', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + onCancel() + } + } catch (error) { + if (setError) setError(error) + enqueueSnackbar({ + message: `Failed to share Item: ${ + typeof error.response.data === 'object' ? error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + onCancel() + } + } + + const component = show ? ( + + +
    + + {dialogProps.data.title} +
    +
    + + + + Name + + + + + + + + + + + {dialogProps.confirmButtonName} + + + +
    + ) : null + + return createPortal(component, portalElement) +} + +ShareWithWorkspaceDialog.propTypes = { + show: PropTypes.bool, + dialogProps: PropTypes.object, + onCancel: PropTypes.func, + onConfirm: PropTypes.func, + setError: PropTypes.func +} + +export default ShareWithWorkspaceDialog diff --git a/packages/ui/src/ui-component/dropdown/AsyncDropdown.jsx b/packages/ui/src/ui-component/dropdown/AsyncDropdown.jsx index f8ad6f91ee8..bf4c8bb1c3a 100644 --- a/packages/ui/src/ui-component/dropdown/AsyncDropdown.jsx +++ b/packages/ui/src/ui-component/dropdown/AsyncDropdown.jsx @@ -31,18 +31,17 @@ const StyledPopper = styled(Popper)({ const fetchList = async ({ name, nodeData, previousNodes, currentNode }) => { const selectedParam = nodeData.inputParams.find((param) => param.name === name) const loadMethod = selectedParam?.loadMethod - const username = localStorage.getItem('username') - const password = localStorage.getItem('password') + + let config = { + headers: { + 'x-request-from': 'internal', + 'Content-type': 'application/json' + }, + withCredentials: true + } let lists = await axios - .post( - `${baseURL}/api/v1/node-load-method/${nodeData.name}`, - { ...nodeData, loadMethod, previousNodes, currentNode }, - { - auth: username && password ? { username, password } : undefined, - headers: { 'Content-type': 'application/json', 'x-request-from': 'internal' } - } - ) + .post(`${baseURL}/api/v1/node-load-method/${nodeData.name}`, { ...nodeData, loadMethod, previousNodes, currentNode }, config) .then(async function (response) { return response.data }) diff --git a/packages/ui/src/ui-component/form/settings.jsx b/packages/ui/src/ui-component/form/settings.jsx new file mode 100644 index 00000000000..9d785b58fbe --- /dev/null +++ b/packages/ui/src/ui-component/form/settings.jsx @@ -0,0 +1,66 @@ +import { useTheme } from '@mui/material/styles' +import { Box, Typography } from '@mui/material' +import { gridSpacing } from '@/store/constant' +import PropTypes from 'prop-types' + +const SettingsSection = ({ action, children, title }) => { + const theme = useTheme() + + return ( + + + + {title} + + + + {children} + + {action && ( + + {action} + + )} + + ) +} + +SettingsSection.propTypes = { + action: PropTypes.node, + children: PropTypes.node, + title: PropTypes.string +} + +export default SettingsSection diff --git a/packages/ui/src/ui-component/input/Input.jsx b/packages/ui/src/ui-component/input/Input.jsx index 7571726f9e1..2468625100b 100644 --- a/packages/ui/src/ui-component/input/Input.jsx +++ b/packages/ui/src/ui-component/input/Input.jsx @@ -32,6 +32,8 @@ export const Input = ({ inputParam, value, nodes, edges, nodeId, onChange, disab return 'password' case 'number': return 'number' + case 'email': + return 'email' default: return 'text' } diff --git a/packages/ui/src/ui-component/rbac/available.jsx b/packages/ui/src/ui-component/rbac/available.jsx new file mode 100644 index 00000000000..884bfeea0fa --- /dev/null +++ b/packages/ui/src/ui-component/rbac/available.jsx @@ -0,0 +1,14 @@ +import PropTypes from 'prop-types' +import { useAuth } from '@/hooks/useAuth' + +export const Available = ({ permission, children }) => { + const { hasPermission } = useAuth() + if (hasPermission(permission)) { + return children + } +} + +Available.propTypes = { + permission: PropTypes.string, + children: PropTypes.element +} diff --git a/packages/ui/src/ui-component/subscription/PricingDialog.jsx b/packages/ui/src/ui-component/subscription/PricingDialog.jsx new file mode 100644 index 00000000000..7f95f2d7c43 
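// Illustrative usage sketch for the Available gate defined above: it renders its
// children only when useAuth().hasPermission(permission) returns true. The
// wrapped button and handler are assumptions for demonstration; the permission
// string mirrors the 'chatflows:delete' entry in settings.js.
import { Button } from '@mui/material'
import { Available } from '@/ui-component/rbac/available'

const DeleteChatflowAction = ({ onDelete }) => (
    <Available permission='chatflows:delete'>
        <Button color='error' onClick={onDelete}>
            Delete
        </Button>
    </Available>
)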
--- /dev/null +++ b/packages/ui/src/ui-component/subscription/PricingDialog.jsx @@ -0,0 +1,670 @@ +import { useState, useEffect, useMemo } from 'react' +import { useSelector } from 'react-redux' +import PropTypes from 'prop-types' +import { + Dialog, + DialogContent, + DialogTitle, + Grid, + Typography, + Button, + IconButton, + Box, + CircularProgress, + DialogActions +} from '@mui/material' +import { IconX, IconCheck, IconCreditCard, IconExternalLink, IconAlertCircle } from '@tabler/icons-react' +import { useTheme, alpha } from '@mui/material/styles' +import accountApi from '@/api/account.api' +import pricingApi from '@/api/pricing' +import workspaceApi from '@/api/workspace' +import userApi from '@/api/user' +import useApi from '@/hooks/useApi' +import { useSnackbar } from 'notistack' +import { store } from '@/store' +import { upgradePlanSuccess } from '@/store/reducers/authSlice' + +const PricingDialog = ({ open, onClose }) => { + const customization = useSelector((state) => state.customization) + const currentUser = useSelector((state) => state.auth.user) + const theme = useTheme() + const { enqueueSnackbar } = useSnackbar() + + const [openPlanDialog, setOpenPlanDialog] = useState(false) + const [selectedPlan, setSelectedPlan] = useState(null) + const [prorationInfo, setProrationInfo] = useState(null) + const [isUpdatingPlan, setIsUpdatingPlan] = useState(false) + const [purchasedSeats, setPurchasedSeats] = useState(0) + const [occupiedSeats, setOccupiedSeats] = useState(0) + const [workspaceCount, setWorkspaceCount] = useState(0) + const [isOpeningBillingPortal, setIsOpeningBillingPortal] = useState(false) + + const getPricingPlansApi = useApi(pricingApi.getPricingPlans) + const getCustomerDefaultSourceApi = useApi(userApi.getCustomerDefaultSource) + const getPlanProrationApi = useApi(userApi.getPlanProration) + const getAdditionalSeatsQuantityApi = useApi(userApi.getAdditionalSeatsQuantity) + const getAllWorkspacesApi = useApi(workspaceApi.getAllWorkspacesByOrganizationId) + + useEffect(() => { + getPricingPlansApi.request() + getAdditionalSeatsQuantityApi.request(currentUser?.activeOrganizationSubscriptionId) + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []) + + const handlePlanClick = async (plan) => { + if (plan.title === 'Enterprise') { + window.location.href = 'mailto:hello@flowiseai.com' + return + } + + setSelectedPlan(plan) + setOpenPlanDialog(true) + getCustomerDefaultSourceApi.request(currentUser?.activeOrganizationCustomerId) + } + + const handleBillingPortalClick = async () => { + setIsOpeningBillingPortal(true) + try { + const response = await accountApi.getBillingData() + if (response.data?.url) { + setOpenPlanDialog(false) + window.open(response.data.url, '_blank') + } + } catch (error) { + console.error('Error accessing billing portal:', error) + } + setIsOpeningBillingPortal(false) + } + + const handleUpdatePlan = async () => { + if (!selectedPlan || !prorationInfo) return + + setIsUpdatingPlan(true) + try { + const response = await userApi.updateSubscriptionPlan( + currentUser.activeOrganizationSubscriptionId, + selectedPlan.prodId, + prorationInfo.prorationDate + ) + if (response.data.status === 'success') { + // Subscription updated successfully + store.dispatch(upgradePlanSuccess(response.data.user)) + enqueueSnackbar('Subscription updated successfully!', { variant: 'success' }) + onClose(true) + } else { + const errorMessage = response.data.message || 'Subscription failed to update' + enqueueSnackbar(errorMessage, { variant: 'error' }) + 
onClose() + } + } catch (error) { + console.error('Error updating plan:', error) + const errorMessage = err.response?.data?.message || 'Failed to verify subscription' + enqueueSnackbar(errorMessage, { variant: 'error' }) + onClose() + } finally { + setIsUpdatingPlan(false) + setOpenPlanDialog(false) + } + } + + useEffect(() => { + if (getAllWorkspacesApi.data) { + setWorkspaceCount(getAllWorkspacesApi.data?.length || 0) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getAllWorkspacesApi.data]) + + useEffect(() => { + if ( + getCustomerDefaultSourceApi.data && + getCustomerDefaultSourceApi.data?.invoice_settings?.default_payment_method && + currentUser?.activeOrganizationSubscriptionId + ) { + getPlanProrationApi.request(currentUser.activeOrganizationSubscriptionId, selectedPlan.prodId) + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getCustomerDefaultSourceApi.data]) + + useEffect(() => { + if (getPlanProrationApi.data) { + setProrationInfo(getPlanProrationApi.data) + } + }, [getPlanProrationApi.data]) + + useEffect(() => { + if (getAdditionalSeatsQuantityApi.data) { + const purchased = getAdditionalSeatsQuantityApi.data?.quantity || 0 + const occupied = getAdditionalSeatsQuantityApi.data?.totalOrgUsers || 1 + + setPurchasedSeats(purchased) + setOccupiedSeats(occupied) + } + }, [getAdditionalSeatsQuantityApi.data]) + + const pricingPlans = useMemo(() => { + if (!getPricingPlansApi.data) return [] + + return getPricingPlansApi.data.map((plan) => { + // Enterprise plan has special handling + if (plan.title === 'Enterprise') { + return { + ...plan, + buttonText: 'Contact Us', + buttonVariant: 'outlined', + buttonAction: () => handlePlanClick(plan) + } + } + + const isCurrentPlanValue = currentUser?.activeOrganizationProductId === plan.prodId + const isStarterPlan = plan.title === 'Starter' + + if (isCurrentPlanValue && (plan.title === 'Pro' || plan.title === 'Enterprise')) { + getAllWorkspacesApi.request(currentUser?.activeOrganizationId) + } + + return { + ...plan, + currentPlan: isCurrentPlanValue, + isStarterPlan, + buttonText: isCurrentPlanValue ? 'Current Plan' : 'Get Started', + buttonVariant: plan.mostPopular ? 'contained' : 'outlined', + disabled: isCurrentPlanValue || !currentUser.isOrganizationAdmin, + buttonAction: () => handlePlanClick(plan) + } + }) + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getPricingPlansApi.data, currentUser.isOrganizationAdmin]) + + const handleClose = () => { + if (!isUpdatingPlan) { + setProrationInfo(null) + onClose() + } + } + + const handlePlanDialogClose = () => { + if (!isUpdatingPlan) { + setProrationInfo(null) + setOpenPlanDialog(false) + } + } + + return ( + <> + theme.palette.background.default, + boxShadow: customization.isDarkMode ? '0 0 50px 0 rgba(255, 255, 255, 0.5)' : '0 0 10px 0 rgba(0, 0, 0, 0.1)' + } + }} + > + + Pricing Plans + + + + + + + {pricingPlans.map((plan) => ( + + + plan.mostPopular + ? theme.palette.primary.main + : plan.currentPlan + ? theme.palette.success.main + : theme.palette.background.paper, + borderRadius: 2, + display: 'flex', + flexDirection: 'column', + minHeight: '450px', + position: 'relative', + boxShadow: customization.isDarkMode + ? '0 0 10px 0 rgba(255, 255, 255, 0.5)' + : '0 0 10px 0 rgba(0, 0, 0, 0.1)', + backgroundColor: (theme) => (plan.currentPlan ? 
alpha(theme.palette.success.main, 0.05) : 'inherit') + }} + > + {plan.currentPlan && ( + + + Current Plan + + + )} + {plan.mostPopular && !plan.currentPlan && ( + + + Most Popular + + + )} + + {plan.title} + + + {plan.subtitle} + + + + {plan.price} + + {plan.period && ( + + {plan.period} + + )} + + + {plan.features.map((feature, index) => ( + + + + {feature.text} + {feature.subtext && ( + + {feature.subtext} + + )} + + + ))} + + {plan.isStarterPlan && !plan.currentPlan && ( + + + First Month Free + + + )} + + + + ))} + + + + + + Confirm Plan Change + + + {purchasedSeats > 0 || occupiedSeats > 1 ? ( + + + You must remove additional seats and users before changing your plan. + + ) : workspaceCount > 1 ? ( + <> + + + You must remove all workspaces except the default workspace before changing your plan. + + + ) : ( + <> + {getCustomerDefaultSourceApi.loading ? ( + + ) : getCustomerDefaultSourceApi.data?.invoice_settings?.default_payment_method ? ( + + Payment Method + + {getCustomerDefaultSourceApi.data.invoice_settings.default_payment_method.card && ( + <> + + + + { + getCustomerDefaultSourceApi.data.invoice_settings.default_payment_method + .card.brand + } + + + ••••{' '} + { + getCustomerDefaultSourceApi.data.invoice_settings.default_payment_method + .card.last4 + } + + + (expires{' '} + { + getCustomerDefaultSourceApi.data.invoice_settings.default_payment_method + .card.exp_month + } + / + { + getCustomerDefaultSourceApi.data.invoice_settings.default_payment_method + .card.exp_year + } + ) + + + + )} + + + ) : ( + + + + No payment method found + + + + )} + + {getPlanProrationApi.loading && ( + + + + )} + + {prorationInfo && ( + + {/* Date Range */} + + {new Date(prorationInfo.currentPeriodStart * 1000).toLocaleDateString('en-US', { + month: 'short', + day: 'numeric' + })}{' '} + -{' '} + {new Date(prorationInfo.currentPeriodEnd * 1000).toLocaleDateString('en-US', { + month: 'short', + day: 'numeric', + year: 'numeric' + })} + + + {/* First Month Free Notice */} + {selectedPlan?.title === 'Starter' && prorationInfo.eligibleForFirstMonthFree && ( + + + {`You're eligible for your first month free!`} + + + )} + + {/* Base Plan */} + + {selectedPlan.title} Plan + + {prorationInfo.currency} {Math.max(0, prorationInfo.newPlanAmount).toFixed(2)} + + + + {selectedPlan?.title === 'Starter' && prorationInfo.eligibleForFirstMonthFree && ( + + First Month Discount + + -{prorationInfo.currency} {Math.max(0, prorationInfo.newPlanAmount).toFixed(2)} + + + )} + + {/* Credit Balance */} + {prorationInfo.prorationAmount > 0 && prorationInfo.creditBalance !== 0 && ( + + Applied account balance + + {prorationInfo.currency} {prorationInfo.creditBalance.toFixed(2)} + + + )} + + {prorationInfo.prorationAmount < 0 && ( + + Credit balance + + {prorationInfo.currency} {prorationInfo.prorationAmount < 0 ? '+' : ''} + {Math.abs(prorationInfo.prorationAmount).toFixed(2)} + + + )} + + {/* Next Payment */} + + Due today + + {prorationInfo.currency}{' '} + {Math.max(0, prorationInfo.prorationAmount + prorationInfo.creditBalance).toFixed(2)} + + + + {prorationInfo.prorationAmount < 0 && ( + + Your available credit will automatically apply to your next invoice. 
+ + )} + + )} + + )} + + + {getCustomerDefaultSourceApi.data?.invoice_settings?.default_payment_method && ( + + + + + )} + + + ) +} + +PricingDialog.propTypes = { + open: PropTypes.bool, + onClose: PropTypes.func +} + +export default PricingDialog diff --git a/packages/ui/src/ui-component/table/FilesTable.jsx b/packages/ui/src/ui-component/table/FilesTable.jsx new file mode 100644 index 00000000000..71734c6baef --- /dev/null +++ b/packages/ui/src/ui-component/table/FilesTable.jsx @@ -0,0 +1,173 @@ +import PropTypes from 'prop-types' +import { useSelector } from 'react-redux' +import { styled } from '@mui/material/styles' +import { + IconButton, + Paper, + Skeleton, + Table, + TableBody, + TableCell, + TableContainer, + TableHead, + TableRow, + Tooltip, + Typography, + useTheme +} from '@mui/material' +import { tableCellClasses } from '@mui/material/TableCell' +import { IconTrash } from '@tabler/icons-react' + +const StyledTableCell = styled(TableCell)(({ theme }) => ({ + borderColor: theme.palette.grey[900] + 25, + + [`&.${tableCellClasses.head}`]: { + color: theme.palette.grey[900] + }, + [`&.${tableCellClasses.body}`]: { + fontSize: 14, + height: 64 + } +})) + +const StyledTableRow = styled(TableRow)(() => ({ + // hide last border + '&:last-child td, &:last-child th': { + border: 0 + } +})) + +export const FilesTable = ({ data, isLoading, filterFunction, handleDelete }) => { + const theme = useTheme() + const customization = useSelector((state) => state.customization) + + return ( + <> + + + + + + Name + + + Path + + + Size + + + Actions + + + + + {isLoading ? ( + <> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ) : ( + <> + {data?.filter(filterFunction).map((row, index) => ( + + + + + {row.name.split('/').pop()} + + + + + + + {row.path} + + + + + + {`${row.size.toFixed(2)} MB`} + + + + handleDelete(row)} size='small'> + + + + + ))} + + )} + +
    +
    + + ) +} + +FilesTable.propTypes = { + data: PropTypes.array, + images: PropTypes.object, + isLoading: PropTypes.bool, + filterFunction: PropTypes.func, + handleDelete: PropTypes.func +} diff --git a/packages/ui/src/ui-component/table/FlowListTable.jsx b/packages/ui/src/ui-component/table/FlowListTable.jsx index 01ab9243639..43087d61fcd 100644 --- a/packages/ui/src/ui-component/table/FlowListTable.jsx +++ b/packages/ui/src/ui-component/table/FlowListTable.jsx @@ -23,6 +23,9 @@ import { import { tableCellClasses } from '@mui/material/TableCell' import FlowListMenu from '../button/FlowListMenu' import { Link } from 'react-router-dom' +import { useAuth } from '@/hooks/useAuth' + +import MoreItemsTooltip from '../tooltip/MoreItemsTooltip' const StyledTableCell = styled(TableCell)(({ theme }) => ({ borderColor: theme.palette.grey[900] + 25, @@ -47,7 +50,21 @@ const getLocalStorageKeyName = (name, isAgentCanvas) => { return (isAgentCanvas ? 'agentcanvas' : 'chatflowcanvas') + '_' + name } -export const FlowListTable = ({ data, images = {}, icons = {}, isLoading, filterFunction, updateFlowsApi, setError, isAgentCanvas }) => { +export const FlowListTable = ({ + data, + images = {}, + icons = {}, + isLoading, + filterFunction, + updateFlowsApi, + setError, + isAgentCanvas, + isAgentflowV2 +}) => { + const { hasPermission } = useAuth() + const isActionsAvailable = isAgentCanvas + ? hasPermission('agentflows:update,agentflows:delete,agentflows:config,agentflows:domains,templates:flowexport,agentflows:export') + : hasPermission('chatflows:update,chatflows:delete,chatflows:config,chatflows:domains,templates:flowexport,chatflows:export') const theme = useTheme() const customization = useSelector((state) => state.customization) @@ -118,9 +135,11 @@ export const FlowListTable = ({ data, images = {}, icons = {}, isLoading, filter Last Modified Date - - Actions - + {isActionsAvailable && ( + + Actions + + )} @@ -139,9 +158,11 @@ export const FlowListTable = ({ data, images = {}, icons = {}, isLoading, filter - - - + {isActionsAvailable && ( + + + + )} @@ -156,9 +177,11 @@ export const FlowListTable = ({ data, images = {}, icons = {}, isLoading, filter - - - + {isActionsAvailable && ( + + + + )} ) : ( @@ -213,64 +236,80 @@ export const FlowListTable = ({ data, images = {}, icons = {}, isLoading, filter }} > {[ - ...(images[row.id] || []).map((img) => ({ type: 'image', src: img })), + ...(images[row.id] || []).map((img) => ({ + type: 'image', + src: img.imageSrc, + label: img.label + })), ...(icons[row.id] || []).map((ic) => ({ type: 'icon', icon: ic.icon, - color: ic.color + color: ic.color, + title: ic.name })) ] .slice(0, 5) - .map((item, index) => - item.type === 'image' ? ( - - ( + + {item.type === 'image' ? ( + + + + ) : ( +
    - - ) : ( -
    - -
    - ) - )} + > + +
    + )} +
    + ))} + {(images[row.id]?.length || 0) + (icons[row.id]?.length || 0) > 5 && ( - ({ label: ic.name })) + ]} > - + {(images[row.id]?.length || 0) + (icons[row.id]?.length || 0) - 5} More - + + + {(images[row.id]?.length || 0) + (icons[row.id]?.length || 0) - 5} More + + )}
    )} @@ -278,21 +317,24 @@ export const FlowListTable = ({ data, images = {}, icons = {}, isLoading, filter {moment(row.updatedDate).format('MMMM Do, YYYY HH:mm:ss')} - - - - - + {isActionsAvailable && ( + + + + + + )} ))} @@ -312,5 +354,6 @@ FlowListTable.propTypes = { filterFunction: PropTypes.func, updateFlowsApi: PropTypes.object, setError: PropTypes.func, - isAgentCanvas: PropTypes.bool + isAgentCanvas: PropTypes.bool, + isAgentflowV2: PropTypes.bool } diff --git a/packages/ui/src/ui-component/table/MarketplaceTable.jsx b/packages/ui/src/ui-component/table/MarketplaceTable.jsx index bc252f7c189..52b1e68c532 100644 --- a/packages/ui/src/ui-component/table/MarketplaceTable.jsx +++ b/packages/ui/src/ui-component/table/MarketplaceTable.jsx @@ -15,10 +15,10 @@ import { TableRow, Typography, Stack, - useTheme, - IconButton + useTheme } from '@mui/material' -import { IconTrash } from '@tabler/icons-react' +import { IconShare, IconTrash } from '@tabler/icons-react' +import { PermissionIconButton } from '@/ui-component/button/RBACButtons' const StyledTableCell = styled(TableCell)(({ theme }) => ({ borderColor: theme.palette.grey[900] + 25, @@ -49,7 +49,8 @@ export const MarketplaceTable = ({ goToCanvas, goToTool, isLoading, - onDelete + onDelete, + onShare }) => { const theme = useTheme() const customization = useSelector((state) => state.customization) @@ -86,15 +87,8 @@ export const MarketplaceTable = ({ Use cases - Nodes - -   - - {onDelete && ( - - Delete - - )} + Badges + @@ -122,11 +116,6 @@ export const MarketplaceTable = ({ - {onDelete && ( - - - - )} @@ -150,11 +139,6 @@ export const MarketplaceTable = ({ - {onDelete && ( - - - - )} ) : ( @@ -223,20 +207,6 @@ export const MarketplaceTable = ({ - - {row.categories && - row.categories.map((tag, index) => ( - - ))} - - - {row.badge && row.badge @@ -252,13 +222,35 @@ export const MarketplaceTable = ({ ))} - {onDelete && ( - - onDelete(row)}> - - - - )} + + {row.shared ? 
( + Shared Template + ) : ( + <> + {onShare && ( + onShare(row)} + > + + + )} + {onDelete && ( + onDelete(row)} + > + + + )} + + )} + ))} @@ -280,5 +272,6 @@ MarketplaceTable.propTypes = { goToTool: PropTypes.func, goToCanvas: PropTypes.func, isLoading: PropTypes.bool, - onDelete: PropTypes.func + onDelete: PropTypes.func, + onShare: PropTypes.func } diff --git a/packages/ui/src/ui-component/table/TableStyles.jsx b/packages/ui/src/ui-component/table/TableStyles.jsx new file mode 100644 index 00000000000..45cee6a1d5d --- /dev/null +++ b/packages/ui/src/ui-component/table/TableStyles.jsx @@ -0,0 +1,22 @@ +import { styled } from '@mui/material/styles' +import { TableCell, TableRow } from '@mui/material' +import { tableCellClasses } from '@mui/material/TableCell' + +export const StyledTableCell = styled(TableCell)(({ theme }) => ({ + borderColor: theme.palette.grey[900] + 25, + + [`&.${tableCellClasses.head}`]: { + color: theme.palette.grey[900] + }, + [`&.${tableCellClasses.body}`]: { + fontSize: 14, + height: 64 + } +})) + +export const StyledTableRow = styled(TableRow)(() => ({ + // hide last border + '&:last-child td, &:last-child th': { + border: 0 + } +})) diff --git a/packages/ui/src/ui-component/tooltip/MoreItemsTooltip.jsx b/packages/ui/src/ui-component/tooltip/MoreItemsTooltip.jsx new file mode 100644 index 00000000000..260240453f3 --- /dev/null +++ b/packages/ui/src/ui-component/tooltip/MoreItemsTooltip.jsx @@ -0,0 +1,40 @@ +import { Tooltip, Typography } from '@mui/material' +import { styled } from '@mui/material/styles' +import PropTypes from 'prop-types' + +const StyledOl = styled('ol')(() => ({ + paddingLeft: 20, + margin: 0 +})) + +const StyledLi = styled('li')(() => ({ + paddingBottom: 4 +})) + +const MoreItemsTooltip = ({ images, children }) => { + if (!images || images.length === 0) return children + + return ( + + {images.map((img) => ( + + {img.label} + + ))} + + } + placement='top' + > + {children} + + ) +} + +export default MoreItemsTooltip + +MoreItemsTooltip.propTypes = { + images: PropTypes.array, + children: PropTypes.node +} diff --git a/packages/ui/src/utils/authUtils.js b/packages/ui/src/utils/authUtils.js new file mode 100644 index 00000000000..5d40a48025d --- /dev/null +++ b/packages/ui/src/utils/authUtils.js @@ -0,0 +1,81 @@ +const getCurrentUser = () => { + if (!localStorage.getItem('user') || localStorage.getItem('user') === 'undefined') return undefined + return JSON.parse(localStorage.getItem('user')) +} + +const updateCurrentUser = (user) => { + let stringifiedUser = user + if (typeof user === 'object') { + stringifiedUser = JSON.stringify(user) + } + localStorage.setItem('user', stringifiedUser) +} + +const removeCurrentUser = () => { + _removeFromStorage() + clearAllCookies() +} + +const _removeFromStorage = () => { + localStorage.removeItem('isAuthenticated') + localStorage.removeItem('isGlobal') + localStorage.removeItem('user') + localStorage.removeItem('permissions') + localStorage.removeItem('features') + localStorage.removeItem('isSSO') +} + +const clearAllCookies = () => { + document.cookie.split(';').forEach((cookie) => { + const name = cookie.split('=')[0].trim() + document.cookie = `${name}=; expires=Thu, 01 Jan 1970 00:00:00 UTC; path=/` + }) +} + +const extractUser = (payload) => { + const user = { + id: payload.id, + email: payload.email, + name: payload.name, + status: payload.status, + role: payload.role, + isSSO: payload.isSSO, + activeOrganizationId: payload.activeOrganizationId, + activeOrganizationSubscriptionId: 
payload.activeOrganizationSubscriptionId, + activeOrganizationCustomerId: payload.activeOrganizationCustomerId, + activeOrganizationProductId: payload.activeOrganizationProductId, + activeWorkspaceId: payload.activeWorkspaceId, + activeWorkspace: payload.activeWorkspace, + lastLogin: payload.lastLogin, + isOrganizationAdmin: payload.isOrganizationAdmin, + assignedWorkspaces: payload.assignedWorkspaces, + permissions: payload.permissions + } + return user +} + +const updateStateAndLocalStorage = (state, payload) => { + const user = extractUser(payload) + state.user = user + state.token = payload.token + state.permissions = payload.permissions + state.features = payload.features + state.isAuthenticated = true + state.isGlobal = user.isOrganizationAdmin + localStorage.setItem('isAuthenticated', 'true') + localStorage.setItem('isGlobal', state.isGlobal) + localStorage.setItem('isSSO', state.user.isSSO) + localStorage.setItem('user', JSON.stringify(user)) + localStorage.setItem('permissions', JSON.stringify(payload.permissions)) + localStorage.setItem('features', JSON.stringify(payload.features)) +} + +const AuthUtils = { + getCurrentUser, + updateCurrentUser, + removeCurrentUser, + updateStateAndLocalStorage, + extractUser +} + +export default AuthUtils diff --git a/packages/ui/src/utils/exportImport.js b/packages/ui/src/utils/exportImport.js index 1d8b3005fa2..0eafc9f3a8e 100644 --- a/packages/ui/src/utils/exportImport.js +++ b/packages/ui/src/utils/exportImport.js @@ -66,6 +66,37 @@ const sanitizeAssistant = (Assistant) => { } } +const sanitizeCustomTemplate = (CustomTemplate) => { + try { + return CustomTemplate.map((customTemplate) => { + return { ...customTemplate, usecases: JSON.stringify(customTemplate.usecases), workspaceId: undefined } + }) + } catch (error) { + throw new Error(`exportImport.sanitizeCustomTemplate ${getErrorMessage(error)}`) + } +} + +const sanitizeDocumentStore = (DocumentStore) => { + try { + return DocumentStore.map((documentStore) => { + return { ...documentStore, workspaceId: undefined } + }) + } catch (error) { + throw new Error(`exportImport.sanitizeDocumentStore ${getErrorMessage(error)}`) + } +} + +const sanitizeExecution = (Execution) => { + try { + return Execution.map((execution) => { + execution.agentflow.workspaceId = undefined + return { ...execution, workspaceId: undefined } + }) + } catch (error) { + throw new Error(`exportImport.sanitizeExecution ${getErrorMessage(error)}`) + } +} + export const stringify = (object) => { try { return JSON.stringify(object, null, 2) @@ -86,10 +117,10 @@ export const exportData = (exportAllData) => { ChatFlow: sanitizeChatflow(exportAllData.ChatFlow), ChatMessage: exportAllData.ChatMessage, ChatMessageFeedback: exportAllData.ChatMessageFeedback, - CustomTemplate: exportAllData.CustomTemplate, - DocumentStore: exportAllData.DocumentStore, + CustomTemplate: sanitizeCustomTemplate(exportAllData.CustomTemplate), + DocumentStore: sanitizeDocumentStore(exportAllData.DocumentStore), DocumentStoreFileChunk: exportAllData.DocumentStoreFileChunk, - Execution: exportAllData.Execution, + Execution: sanitizeExecution(exportAllData.Execution), Tool: sanitizeTool(exportAllData.Tool), Variable: sanitizeVariable(exportAllData.Variable) } diff --git a/packages/ui/src/utils/genericHelper.js b/packages/ui/src/utils/genericHelper.js index 97542114eff..dae926a8162 100644 --- a/packages/ui/src/utils/genericHelper.js +++ b/packages/ui/src/utils/genericHelper.js @@ -982,6 +982,18 @@ export const kFormatter = (num) => { return item ? 
(num / item.value).toFixed(1).replace(regexp, '').concat(item.symbol) : '0' } +export const redirectWhenUnauthorized = ({ error, redirectTo }) => { + if (error === 'unauthorized') { + window.location.href = redirectTo + } else if (error === 'subscription_canceled') { + window.location.href = `${redirectTo}?error=${error}` + } +} + +export const truncateString = (str, maxLength) => { + return str.length > maxLength ? `${str.slice(0, maxLength - 3)}...` : str +} + const toCamelCase = (str) => { return str .split(' ') // Split by space to process each word diff --git a/packages/ui/src/utils/validation.js b/packages/ui/src/utils/validation.js new file mode 100644 index 00000000000..a889919cf81 --- /dev/null +++ b/packages/ui/src/utils/validation.js @@ -0,0 +1,17 @@ +import { z } from 'zod' + +export const passwordSchema = z + .string() + .min(8, 'Password must be at least 8 characters') + .regex(/[a-z]/, 'Password must contain at least one lowercase letter') + .regex(/[A-Z]/, 'Password must contain at least one uppercase letter') + .regex(/\d/, 'Password must contain at least one digit') + .regex(/[@$!%*?&-]/, 'Password must contain at least one special character (@$!%*?&-)') + +export const validatePassword = (password) => { + const result = passwordSchema.safeParse(password) + if (!result.success) { + return result.error.errors.map((err) => err.message) + } + return [] +} diff --git a/packages/ui/src/views/account/UserProfile.jsx b/packages/ui/src/views/account/UserProfile.jsx new file mode 100644 index 00000000000..560aafbae96 --- /dev/null +++ b/packages/ui/src/views/account/UserProfile.jsx @@ -0,0 +1,294 @@ +import { useEffect, useState } from 'react' +import { useDispatch, useSelector } from 'react-redux' + +// material-ui +import { Box, Button, OutlinedInput, Stack, Typography } from '@mui/material' + +// project imports +import ErrorBoundary from '@/ErrorBoundary' +import ViewHeader from '@/layout/MainLayout/ViewHeader' +import { StyledButton } from '@/ui-component/button/StyledButton' +import MainCard from '@/ui-component/cards/MainCard' +import SettingsSection from '@/ui-component/form/settings' +import { BackdropLoader } from '@/ui-component/loading/BackdropLoader' + +// API +import userApi from '@/api/user' +import useApi from '@/hooks/useApi' + +// Store +import { store } from '@/store' +import { closeSnackbar as closeSnackbarAction, enqueueSnackbar as enqueueSnackbarAction } from '@/store/actions' +import { gridSpacing } from '@/store/constant' +import { useError } from '@/store/context/ErrorContext' +import { userProfileUpdated } from '@/store/reducers/authSlice' + +// utils +import useNotifier from '@/utils/useNotifier' +import { validatePassword } from '@/utils/validation' + +// Icons +import { IconAlertTriangle, IconX } from '@tabler/icons-react' + +const UserProfile = () => { + useNotifier() + const { error, setError } = useError() + + const dispatch = useDispatch() + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const currentUser = useSelector((state) => state.auth.user) + const isAuthenticated = useSelector((state) => state.auth.isAuthenticated) + + const [newPasswordVal, setNewPasswordVal] = useState('') + const [confirmPasswordVal, setConfirmPasswordVal] = useState('') + const [usernameVal, setUsernameVal] = useState('') + const [emailVal, setEmailVal] = useState('') + + const [loading, setLoading] = useState(false) + const [authErrors, setAuthErrors] = 
useState([]) + + const getUserApi = useApi(userApi.getUserById) + + const validateAndSubmit = async () => { + const validationErrors = [] + setAuthErrors([]) + if (!isAuthenticated) { + validationErrors.push('User is not authenticated') + } + if (currentUser.isSSO) { + validationErrors.push('User is an SSO user, unable to update details') + } + if (!usernameVal) { + validationErrors.push('Name cannot be left blank!') + } + if (!emailVal) { + validationErrors.push('Email cannot be left blank!') + } + if (newPasswordVal || confirmPasswordVal) { + if (newPasswordVal !== confirmPasswordVal) { + validationErrors.push('New Password and Confirm Password do not match') + } + const passwordErrors = validatePassword(newPasswordVal) + if (passwordErrors.length > 0) { + validationErrors.push(...passwordErrors) + } + } + if (validationErrors.length > 0) { + setAuthErrors(validationErrors) + return + } + const body = { + id: currentUser.id, + email: emailVal, + name: usernameVal + } + if (newPasswordVal) body.password = newPasswordVal + setLoading(true) + try { + const updateResponse = await userApi.updateUser(body) + setAuthErrors([]) + setLoading(false) + if (updateResponse.data) { + store.dispatch(userProfileUpdated(updateResponse.data)) + enqueueSnackbar({ + message: 'User Details Updated!', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + } + } catch (error) { + setLoading(false) + setAuthErrors([typeof error.response.data === 'object' ? error.response.data.message : error.response.data]) + enqueueSnackbar({ + message: `Failed to update user details`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } + } + + useEffect(() => { + if (getUserApi.data) { + const user = getUserApi.data + setEmailVal(user.email) + setUsernameVal(user.name) + setLoading(false) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getUserApi.data]) + + useEffect(() => { + if (getUserApi.error) { + setLoading(false) + setError(getUserApi.error) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getUserApi.error]) + + useEffect(() => { + setLoading(true) + getUserApi.request(currentUser.id) + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []) + + return ( + <> + + {error ? ( + + ) : ( + + + {authErrors && authErrors.length > 0 && ( +
    + + + + + +
      + {authErrors.map((msg, key) => ( + +
    • {msg}
    • +
      + ))} +
    +
    +
    +
    + )} + + Save +
    + } + title='Profile' + > + + +
    + Email +
    +
    + setEmailVal(e.target.value)} + value={emailVal} + /> +
    + +
    + + Full Name * + +
    +
    + setUsernameVal(e.target.value)} + value={usernameVal} + /> +
    + +
    + + New Password * + +
    +
    + setNewPasswordVal(e.target.value)} + value={newPasswordVal} + /> + + + Password must be at least 8 characters long and contain at least one lowercase letter, one + uppercase letter, one digit, and one special character (@$!%*?&-). + + +
    + +
    + + Confirm Password * + +
    +
    + setConfirmPasswordVal(e.target.value)} + value={confirmPasswordVal} + /> + + Retype your new password. Must match the password typed above. + +
    +
    + + + )} + + {loading && } + + ) +} + +export default UserProfile diff --git a/packages/ui/src/views/account/index.jsx b/packages/ui/src/views/account/index.jsx new file mode 100644 index 00000000000..24d460aefd8 --- /dev/null +++ b/packages/ui/src/views/account/index.jsx @@ -0,0 +1,1442 @@ +import { useEffect, useMemo, useState } from 'react' +import { useDispatch, useSelector } from 'react-redux' +import { useNavigate } from 'react-router-dom' + +// utils +import useNotifier from '@/utils/useNotifier' +import { validatePassword } from '@/utils/validation' + +// material-ui +import { + Box, + Button, + CircularProgress, + Dialog, + DialogActions, + DialogContent, + DialogTitle, + LinearProgress, + OutlinedInput, + Skeleton, + Stack, + TextField, + Typography +} from '@mui/material' +import { darken, useTheme } from '@mui/material/styles' + +// project imports +import ErrorBoundary from '@/ErrorBoundary' +import ViewHeader from '@/layout/MainLayout/ViewHeader' +import { StyledButton } from '@/ui-component/button/StyledButton' +import MainCard from '@/ui-component/cards/MainCard' +import SettingsSection from '@/ui-component/form/settings' +import PricingDialog from '@/ui-component/subscription/PricingDialog' + +// Icons +import { IconAlertCircle, IconCreditCard, IconExternalLink, IconSparkles, IconX } from '@tabler/icons-react' + +// API +import accountApi from '@/api/account.api' +import pricingApi from '@/api/pricing' +import userApi from '@/api/user' + +// Hooks +import useApi from '@/hooks/useApi' + +// Store +import { store } from '@/store' +import { closeSnackbar as closeSnackbarAction, enqueueSnackbar as enqueueSnackbarAction } from '@/store/actions' +import { gridSpacing } from '@/store/constant' +import { useConfig } from '@/store/context/ConfigContext' +import { useError } from '@/store/context/ErrorContext' +import { userProfileUpdated } from '@/store/reducers/authSlice' + +// ==============================|| ACCOUNT SETTINGS ||============================== // + +const calculatePercentage = (count, total) => { + return Math.min((count / total) * 100, 100) +} + +const AccountSettings = () => { + const theme = useTheme() + const dispatch = useDispatch() + useNotifier() + const navigate = useNavigate() + + const currentUser = useSelector((state) => state.auth.user) + const customization = useSelector((state) => state.customization) + + const { error, setError } = useError() + const { isCloud } = useConfig() + + const [isLoading, setLoading] = useState(true) + const [profileName, setProfileName] = useState('') + const [email, setEmail] = useState('') + const [migrateEmail, setMigrateEmail] = useState('') + const [newPassword, setNewPassword] = useState('') + const [confirmPassword, setConfirmPassword] = useState('') + const [usage, setUsage] = useState(null) + const [isBillingLoading, setIsBillingLoading] = useState(false) + const [isMigrateLoading, setIsMigrateLoading] = useState(false) + const [seatsQuantity, setSeatsQuantity] = useState(0) + const [prorationInfo, setProrationInfo] = useState(null) + const [isUpdatingSeats, setIsUpdatingSeats] = useState(false) + const [openPricingDialog, setOpenPricingDialog] = useState(false) + const [openRemoveSeatsDialog, setOpenRemoveSeatsDialog] = useState(false) + const [openAddSeatsDialog, setOpenAddSeatsDialog] = useState(false) + const [includedSeats, setIncludedSeats] = useState(0) + const [purchasedSeats, setPurchasedSeats] = useState(0) + const [occupiedSeats, setOccupiedSeats] = useState(0) + const [totalSeats, setTotalSeats] = 
useState(0) + + const predictionsUsageInPercent = useMemo(() => { + return usage ? calculatePercentage(usage.predictions?.usage, usage.predictions?.limit) : 0 + }, [usage]) + const storageUsageInPercent = useMemo(() => { + return usage ? calculatePercentage(usage.storage?.usage, usage.storage?.limit) : 0 + }, [usage]) + + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const getUserByIdApi = useApi(userApi.getUserById) + const getPricingPlansApi = useApi(pricingApi.getPricingPlans) + const getAdditionalSeatsQuantityApi = useApi(userApi.getAdditionalSeatsQuantity) + const getAdditionalSeatsProrationApi = useApi(userApi.getAdditionalSeatsProration) + const getCustomerDefaultSourceApi = useApi(userApi.getCustomerDefaultSource) + const updateAdditionalSeatsApi = useApi(userApi.updateAdditionalSeats) + const getCurrentUsageApi = useApi(userApi.getCurrentUsage) + + useEffect(() => { + if (isCloud) { + getUserByIdApi.request(currentUser.id) + getPricingPlansApi.request() + getAdditionalSeatsQuantityApi.request(currentUser?.activeOrganizationSubscriptionId) + getCurrentUsageApi.request() + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [isCloud]) + + useEffect(() => { + setLoading(getUserByIdApi.loading) + }, [getUserByIdApi.loading]) + + useEffect(() => { + try { + if (getUserByIdApi.data) { + setProfileName(getUserByIdApi.data?.name || '') + setEmail(getUserByIdApi.data?.email || '') + setMigrateEmail(getUserByIdApi.data?.email || '') + } + } catch (e) { + console.error(e) + } + }, [getUserByIdApi.data]) + + useEffect(() => { + if (getCurrentUsageApi.data) { + setUsage(getCurrentUsageApi.data) + } + }, [getCurrentUsageApi.data]) + + useEffect(() => { + if (openRemoveSeatsDialog || openAddSeatsDialog) { + setSeatsQuantity(0) + getCustomerDefaultSourceApi.request(currentUser?.activeOrganizationCustomerId) + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [openRemoveSeatsDialog, openAddSeatsDialog]) + + useEffect(() => { + if (getAdditionalSeatsProrationApi.data) { + setProrationInfo(getAdditionalSeatsProrationApi.data) + } + }, [getAdditionalSeatsProrationApi.data]) + + useEffect(() => { + if (!getAdditionalSeatsQuantityApi.loading && getAdditionalSeatsQuantityApi.data) { + const included = getAdditionalSeatsQuantityApi.data?.includedSeats || 1 + const purchased = getAdditionalSeatsQuantityApi.data?.quantity || 0 + const occupied = getAdditionalSeatsQuantityApi.data?.totalOrgUsers || 1 + + setIncludedSeats(included) + setPurchasedSeats(purchased) + setOccupiedSeats(occupied) + setTotalSeats(included + purchased) + } + }, [getAdditionalSeatsQuantityApi.data, getAdditionalSeatsQuantityApi.loading]) + + const currentPlanTitle = useMemo(() => { + if (!getPricingPlansApi.data) return '' + const currentPlan = getPricingPlansApi.data.find((plan) => plan.prodId === currentUser?.activeOrganizationProductId) + return currentPlan?.title || '' + }, [getPricingPlansApi.data, currentUser?.activeOrganizationProductId]) + + const handleMigrateEmail = async () => { + setIsMigrateLoading(true) + try { + const obj = { + email: migrateEmail + } + const resp = await accountApi.cancelSubscription(obj) + if (resp.status === 200) { + enqueueSnackbar({ + message: `Instruction to cancel subscription has been sent to ${migrateEmail}`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + } + } catch (error) { + 
enqueueSnackbar({ + message: 'Failed to access billing portal', + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + action: (key) => ( + + ) + } + }) + } finally { + setIsMigrateLoading(false) + } + } + + const handleBillingPortalClick = async () => { + setIsBillingLoading(true) + try { + const resp = await accountApi.getBillingData() + if (resp.data?.url) { + window.open(resp.data.url, '_blank') + } + } catch (error) { + enqueueSnackbar({ + message: 'Failed to access billing portal', + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + action: (key) => ( + + ) + } + }) + } finally { + setIsBillingLoading(false) + } + } + + const saveProfileData = async () => { + try { + const obj = { + id: currentUser.id, + name: profileName, + email: email + } + const saveProfileResp = await userApi.updateUser(obj) + if (saveProfileResp.data) { + store.dispatch(userProfileUpdated(saveProfileResp.data)) + enqueueSnackbar({ + message: 'Profile updated', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + } + } catch (error) { + setError(error) + enqueueSnackbar({ + message: `Failed to update profile: ${ + typeof error.response.data === 'object' ? error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } + } + + const savePassword = async () => { + try { + const validationErrors = [] + if (newPassword !== confirmPassword) { + validationErrors.push('New Password and Confirm Password do not match') + } + const passwordErrors = validatePassword(newPassword) + if (passwordErrors.length > 0) { + validationErrors.push(...passwordErrors) + } + if (validationErrors.length > 0) { + enqueueSnackbar({ + message: validationErrors.join(', '), + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + return + } + + const obj = { + id: currentUser.id, + password: newPassword + } + const saveProfileResp = await userApi.updateUser(obj) + if (saveProfileResp.data) { + store.dispatch(userProfileUpdated(saveProfileResp.data)) + enqueueSnackbar({ + message: 'Password updated', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + } + } catch (error) { + setError(error) + enqueueSnackbar({ + message: `Failed to update password: ${ + typeof error.response.data === 'object' ? 
error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } + } + + const handleSeatsModification = async (newSeatsAmount) => { + try { + setIsUpdatingSeats(true) + + if (!prorationInfo?.prorationDate) { + throw new Error('No proration date available') + } + + await updateAdditionalSeatsApi.request( + currentUser?.activeOrganizationSubscriptionId, + newSeatsAmount, + prorationInfo.prorationDate + ) + enqueueSnackbar({ + message: 'Seats updated successfully', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + // Refresh the seats quantity display + getAdditionalSeatsQuantityApi.request(currentUser?.activeOrganizationSubscriptionId) + } catch (error) { + console.error('Error updating seats:', error) + enqueueSnackbar({ + message: `Failed to update seats: ${ + typeof error.response.data === 'object' ? error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } finally { + setIsUpdatingSeats(false) + setProrationInfo(null) + setOpenAddSeatsDialog(false) + setOpenRemoveSeatsDialog(false) + setSeatsQuantity(0) + } + } + + const handleQuantityChange = (value, operation) => { + setSeatsQuantity(value) + // Calculate proration for the new quantity + const totalAdditionalSeats = operation === 'add' ? purchasedSeats + value : purchasedSeats - value + if (currentUser?.activeOrganizationSubscriptionId) { + getAdditionalSeatsProrationApi.request(currentUser.activeOrganizationSubscriptionId, totalAdditionalSeats) + } + } + + const handleRemoveSeatsDialogClose = () => { + if (!isUpdatingSeats) { + setProrationInfo(null) + setOpenRemoveSeatsDialog(false) + setSeatsQuantity(0) + } + } + + const handleAddSeatsDialogClose = () => { + if (!isUpdatingSeats) { + setProrationInfo(null) + setOpenAddSeatsDialog(false) + setSeatsQuantity(0) + } + } + + // Calculate empty seats + const emptySeats = Math.min(purchasedSeats, totalSeats - occupiedSeats) + + return ( + + {error ? ( + + ) : ( + + + {isLoading && !getUserByIdApi.data ? ( + + + + + + + + + + + + + + + + ) : ( + <> + + + + {currentPlanTitle && ( + + Current Organization Plan: + + {currentPlanTitle.toUpperCase()} + + + )} + + Update your billing details and subscription + + + + + + + + + + + + + Seats Included in Plan: + + {getAdditionalSeatsQuantityApi.loading ? : includedSeats} + + + + Additional Seats Purchased: + + {getAdditionalSeatsQuantityApi.loading ? : purchasedSeats} + + + + Occupied Seats: + + {getAdditionalSeatsQuantityApi.loading ? 
( + + ) : ( + `${occupiedSeats}/${totalSeats}` + )} + + + + + {getAdditionalSeatsQuantityApi.data?.quantity > 0 && currentPlanTitle.toUpperCase() === 'PRO' && ( + + )} + { + if (currentPlanTitle.toUpperCase() === 'PRO') { + setOpenAddSeatsDialog(true) + } else { + setOpenPricingDialog(true) + } + }} + title='Add Seats is available only for PRO plan' + sx={{ borderRadius: 2, height: 40 }} + > + Add Seats + + + + + + + + + Predictions + + {`${usage?.predictions?.usage || 0} / ${usage?.predictions?.limit || 0}`} + + + + + { + if (predictionsUsageInPercent > 90) return theme.palette.error.main + if (predictionsUsageInPercent > 75) return theme.palette.warning.main + if (predictionsUsageInPercent > 50) return theme.palette.success.light + return theme.palette.success.main + } + } + }} + value={predictionsUsageInPercent > 100 ? 100 : predictionsUsageInPercent} + variant='determinate' + /> + + {`${predictionsUsageInPercent.toFixed( + 2 + )}%`} + + + + + Storage + + {`${(usage?.storage?.usage || 0).toFixed(2)}MB / ${(usage?.storage?.limit || 0).toFixed( + 2 + )}MB`} + + + + + { + if (storageUsageInPercent > 90) return theme.palette.error.main + if (storageUsageInPercent > 75) return theme.palette.warning.main + if (storageUsageInPercent > 50) return theme.palette.success.light + return theme.palette.success.main + } + } + }} + value={storageUsageInPercent > 100 ? 100 : storageUsageInPercent} + variant='determinate' + /> + + {`${storageUsageInPercent.toFixed( + 2 + )}%`} + + + + + + Save + + } + title='Profile' + > + + + Name + setProfileName(e.target.value)} + value={profileName} + /> + + + Email Address + setEmail(e.target.value)} + value={email} + /> + + + + {!currentUser.isSSO && ( + + Save + + } + title='Security' + > + + + New Password + setNewPassword(e.target.value)} + value={newPassword} + /> + + + Password must be at least 8 characters long and contain at least one lowercase letter, + one uppercase letter, one digit, and one special character (@$!%*?&-). + + + + + Confirm Password + setConfirmPassword(e.target.value)} + value={confirmPassword} + /> + + + + )} + + + + Migrate from existing cloud subscription? + + {`If you have an existing cloud app like .app.flowiseai.com, after finished migrating your work, you can cancel the previous subscription. We'll send you an email with a link to cancel your previous subscription.`} + + + setMigrateEmail(e.target.value)} + value={migrateEmail} + /> + + + + + + + + + )} + + )} + {openPricingDialog && isCloud && ( + { + setOpenPricingDialog(false) + if (planUpdated) { + navigate('/') + navigate(0) + } + }} + /> + )} + {/* Remove Seats Dialog */} + + Remove Additional Seats + + + {emptySeats === 0 ? ( + + + You must remove users from your organization before removing seats. + + ) : ( + + {/* Occupied Seats */} + + Occupied Seats + {occupiedSeats} + + + {/* Empty Seats */} + + Empty Seats + {emptySeats} + + + + Number of Empty Seats to Remove + { + const value = Math.max(0, Math.min(emptySeats, parseInt(e.target.value) || 0)) + handleQuantityChange(value, 'remove') + }} + onKeyDown={(e) => { + if (e.key === '-' || e.key === 'e') { + e.preventDefault() + } + }} + InputProps={{ + inputProps: { + min: 0, + max: emptySeats, + step: 1 + } + }} + sx={{ width: '70px' }} + disabled={!getCustomerDefaultSourceApi.data} + /> + + + {/* Total Seats */} + + New Total Seats + {totalSeats - seatsQuantity} + + + )} + + {getAdditionalSeatsProrationApi.loading && ( + + + + )} + + {getCustomerDefaultSourceApi.loading ? 
( + + ) : getCustomerDefaultSourceApi.data?.invoice_settings?.default_payment_method ? ( + + Payment Method + + {getCustomerDefaultSourceApi.data.invoice_settings.default_payment_method.card && ( + <> + + + + {getCustomerDefaultSourceApi.data.invoice_settings.default_payment_method.card.brand} + + + ••••{' '} + {getCustomerDefaultSourceApi.data.invoice_settings.default_payment_method.card.last4} + + + (expires{' '} + { + getCustomerDefaultSourceApi.data.invoice_settings.default_payment_method.card + .exp_month + } + / + {getCustomerDefaultSourceApi.data.invoice_settings.default_payment_method.card.exp_year} + ) + + + + )} + + + ) : ( + + + + No payment method found + + + + )} + + {/* Proration info */} + {prorationInfo && ( + + {/* Date Range */} + + {new Date(prorationInfo.currentPeriodStart * 1000).toLocaleDateString('en-US', { + month: 'short', + day: 'numeric' + })}{' '} + -{' '} + {new Date(prorationInfo.currentPeriodEnd * 1000).toLocaleDateString('en-US', { + month: 'short', + day: 'numeric', + year: 'numeric' + })} + + + {/* Base Plan */} + + {currentPlanTitle} + + {prorationInfo.currency} {Math.max(0, prorationInfo.basePlanAmount).toFixed(2)} + + + + {/* Additional Seats */} + + + Additional Seats Left (Prorated) + + Qty {purchasedSeats - seatsQuantity} + + + + + {prorationInfo.currency} {Math.max(0, prorationInfo.additionalSeatsProratedAmount).toFixed(2)} + + + {prorationInfo.currency} {prorationInfo.seatPerUnitPrice.toFixed(2)} each + + + + + {prorationInfo.prorationAmount < 0 && ( + + Credit balance + + {prorationInfo.currency} {prorationInfo.prorationAmount < 0 ? '+' : ''} + {Math.abs(prorationInfo.prorationAmount).toFixed(2)} + + + )} + + {/* Next Payment */} + + Due today + + {prorationInfo.currency} {Math.max(0, prorationInfo.prorationAmount).toFixed(2)} + + + + {prorationInfo.prorationAmount < 0 && ( + + Your available credit will automatically apply to your next invoice. + + )} + + )} + + + {getCustomerDefaultSourceApi.data?.invoice_settings?.default_payment_method && ( + + + + + )} + + {/* Add Seats Dialog */} + + Add Additional Seats + + + + {/* Occupied Seats */} + + Occupied Seats + {occupiedSeats} + + + {/* Included Seats */} + + Seats Included with Plan + {includedSeats} + + + {/* Additional Seats */} + + Additional Seats Purchased + {purchasedSeats} + + + + Number of Additional Seats to Add + { + const value = Math.max(0, parseInt(e.target.value) || 0) + handleQuantityChange(value, 'add') + }} + onKeyDown={(e) => { + if (e.key === '-' || e.key === 'e') { + e.preventDefault() + } + }} + InputProps={{ + inputProps: { + min: 0 + } + }} + sx={{ width: '70px' }} + disabled={!getCustomerDefaultSourceApi.data} + /> + + + {/* Total Seats */} + + New Total Seats + {totalSeats + seatsQuantity} + + + + {getAdditionalSeatsProrationApi.loading && ( + + + + )} + + {getCustomerDefaultSourceApi.loading ? ( + + ) : getCustomerDefaultSourceApi.data?.invoice_settings?.default_payment_method ? 
( + + Payment Method + + {getCustomerDefaultSourceApi.data.invoice_settings.default_payment_method.card && ( + <> + + + + {getCustomerDefaultSourceApi.data.invoice_settings.default_payment_method.card.brand} + + + ••••{' '} + {getCustomerDefaultSourceApi.data.invoice_settings.default_payment_method.card.last4} + + + (expires{' '} + { + getCustomerDefaultSourceApi.data.invoice_settings.default_payment_method.card + .exp_month + } + / + {getCustomerDefaultSourceApi.data.invoice_settings.default_payment_method.card.exp_year} + ) + + + + )} + + + ) : ( + + + + No payment method found + + + + )} + + {/* Proration info */} + {prorationInfo && ( + + {/* Date Range */} + + {new Date(prorationInfo.currentPeriodStart * 1000).toLocaleDateString('en-US', { + month: 'short', + day: 'numeric' + })}{' '} + -{' '} + {new Date(prorationInfo.currentPeriodEnd * 1000).toLocaleDateString('en-US', { + month: 'short', + day: 'numeric', + year: 'numeric' + })} + + + {/* Base Plan */} + + {currentPlanTitle} + + {prorationInfo.currency} {prorationInfo.basePlanAmount.toFixed(2)} + + + + {/* Additional Seats */} + + + Additional Seats (Prorated) + + Qty {seatsQuantity + purchasedSeats} + + + + + {prorationInfo.currency} {prorationInfo.additionalSeatsProratedAmount.toFixed(2)} + + + {prorationInfo.currency} {prorationInfo.seatPerUnitPrice.toFixed(2)} each + + + + + {/* Credit Balance */} + {prorationInfo.creditBalance !== 0 && ( + + Applied account balance + + {prorationInfo.currency} {prorationInfo.creditBalance.toFixed(2)} + + + )} + + {/* Next Payment */} + + Due today + + {prorationInfo.currency}{' '} + {Math.max(0, prorationInfo.prorationAmount + prorationInfo.creditBalance).toFixed(2)} + + + + {prorationInfo.prorationAmount === 0 && prorationInfo.creditBalance < 0 && ( + + Your available credit will automatically apply to your next invoice. + + )} + + )} + + + {getCustomerDefaultSourceApi.data?.invoice_settings?.default_payment_method && ( + + + + + )} + + + ) +} + +export default AccountSettings diff --git a/packages/ui/src/views/agentexecutions/ExecutionDetails.jsx b/packages/ui/src/views/agentexecutions/ExecutionDetails.jsx index 21e81d64c7b..90fbc18a0d5 100644 --- a/packages/ui/src/views/agentexecutions/ExecutionDetails.jsx +++ b/packages/ui/src/views/agentexecutions/ExecutionDetails.jsx @@ -743,9 +743,9 @@ export const ExecutionDetails = ({ open, isPublic, execution, metadata, onClose, sx={{ pl: 1 }} icon={} variant='outlined' - label={metadata?.agentflow?.name || metadata?.agentflow?.id || 'Go to AgentFlow'} + label={localMetadata?.agentflow?.name || localMetadata?.agentflow?.id || 'Go to AgentFlow'} className={'button'} - onClick={() => window.open(`/v2/agentcanvas/${metadata?.agentflow?.id}`, '_blank')} + onClick={() => window.open(`/v2/agentcanvas/${localMetadata?.agentflow?.id}`, '_blank')} /> )} diff --git a/packages/ui/src/views/agentexecutions/PublicExecutionDetails.jsx b/packages/ui/src/views/agentexecutions/PublicExecutionDetails.jsx index a0c9fa1ebd0..b2c95c6ae37 100644 --- a/packages/ui/src/views/agentexecutions/PublicExecutionDetails.jsx +++ b/packages/ui/src/views/agentexecutions/PublicExecutionDetails.jsx @@ -38,8 +38,16 @@ const PublicExecutionDetails = () => { const executionDetails = typeof execution.executionData === 'string' ? 
JSON.parse(execution.executionData) : execution.executionData setExecution(executionDetails) - setSelectedMetadata(omit(execution, ['executionData'])) + const newMetadata = { + ...omit(execution, ['executionData']), + agentflow: { + ...selectedMetadata.agentflow + } + } + setSelectedMetadata(newMetadata) } + + // eslint-disable-next-line react-hooks/exhaustive-deps }, [getExecutionByIdPublicApi.data]) useEffect(() => { diff --git a/packages/ui/src/views/agentexecutions/index.jsx b/packages/ui/src/views/agentexecutions/index.jsx index 90d8600c2a5..8d12dd706d2 100644 --- a/packages/ui/src/views/agentexecutions/index.jsx +++ b/packages/ui/src/views/agentexecutions/index.jsx @@ -29,6 +29,7 @@ import { import MainCard from '@/ui-component/cards/MainCard' import ErrorBoundary from '@/ErrorBoundary' import ViewHeader from '@/layout/MainLayout/ViewHeader' +import { Available } from '@/ui-component/rbac/available' // API import useApi from '@/hooks/useApi' @@ -224,8 +225,16 @@ const AgentExecutions = () => { const executionDetails = typeof execution.executionData === 'string' ? JSON.parse(execution.executionData) : execution.executionData setSelectedExecutionData(executionDetails) - setSelectedMetadata(omit(execution, ['executionData'])) + const newMetadata = { + ...omit(execution, ['executionData']), + agentflow: { + ...selectedMetadata.agentflow + } + } + setSelectedMetadata(newMetadata) } + + // eslint-disable-next-line react-hooks/exhaustive-deps }, [getExecutionByIdApi.data]) return ( @@ -336,20 +345,22 @@ const AgentExecutions = () => { - - - - - - - + + + + + + + + + diff --git a/packages/ui/src/views/agentflows/index.jsx b/packages/ui/src/views/agentflows/index.jsx index 6e8d8cde08a..cdd44d585ee 100644 --- a/packages/ui/src/views/agentflows/index.jsx +++ b/packages/ui/src/views/agentflows/index.jsx @@ -10,12 +10,11 @@ import MainCard from '@/ui-component/cards/MainCard' import ItemCard from '@/ui-component/cards/ItemCard' import { gridSpacing } from '@/store/constant' import AgentsEmptySVG from '@/assets/images/agents_empty.svg' -import LoginDialog from '@/ui-component/dialog/LoginDialog' import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' import { FlowListTable } from '@/ui-component/table/FlowListTable' -import { StyledButton } from '@/ui-component/button/StyledButton' import ViewHeader from '@/layout/MainLayout/ViewHeader' import ErrorBoundary from '@/ErrorBoundary' +import { StyledPermissionButton } from '@/ui-component/button/RBACButtons' // API import chatflowsApi from '@/api/chatflows' @@ -25,6 +24,7 @@ import useApi from '@/hooks/useApi' // const import { baseURL, AGENTFLOW_ICONS } from '@/store/constant' +import { useError } from '@/store/context/ErrorContext' // icons import { IconPlus, IconLayoutGrid, IconList } from '@tabler/icons-react' @@ -36,12 +36,10 @@ const Agentflows = () => { const theme = useTheme() const [isLoading, setLoading] = useState(true) - const [error, setError] = useState(null) const [images, setImages] = useState({}) const [icons, setIcons] = useState({}) const [search, setSearch] = useState('') - const [loginDialogOpen, setLoginDialogOpen] = useState(false) - const [loginDialogProps, setLoginDialogProps] = useState({}) + const { error, setError } = useError() const getAllAgentflows = useApi(chatflowsApi.getAllAgentflows) const [view, setView] = useState(localStorage.getItem('flowDisplayStyle') || 'card') @@ -72,12 +70,6 @@ const Agentflows = () => { ) } - const onLoginClick = (username, password) => { - localStorage.setItem('username', username) 
- localStorage.setItem('password', password) - navigate(0) - } - const addNew = () => { if (agentflowVersion === 'v2') { navigate('/v2/agentcanvas') @@ -102,16 +94,10 @@ const Agentflows = () => { useEffect(() => { if (getAllAgentflows.error) { - if (getAllAgentflows.error?.response?.status === 401) { - setLoginDialogProps({ - title: 'Login', - confirmButtonName: 'Login' - }) - setLoginDialogOpen(true) - } else { - setError(getAllAgentflows.error) - } + setError(getAllAgentflows.error) } + + // eslint-disable-next-line react-hooks/exhaustive-deps }, [getAllAgentflows.error]) useEffect(() => { @@ -131,13 +117,17 @@ const Agentflows = () => { images[agentflows[i].id] = [] icons[agentflows[i].id] = [] for (let j = 0; j < nodes.length; j += 1) { + if (nodes[j].data.name === 'stickyNote' || nodes[j].data.name === 'stickyNoteAgentflow') continue const foundIcon = AGENTFLOW_ICONS.find((icon) => icon.name === nodes[j].data.name) if (foundIcon) { icons[agentflows[i].id].push(foundIcon) } else { const imageSrc = `${baseURL}/api/v1/node-icon/${nodes[j].data.name}` - if (!images[agentflows[i].id].includes(imageSrc)) { - images[agentflows[i].id].push(imageSrc) + if (!images[agentflows[i].id].some((img) => img.imageSrc === imageSrc)) { + images[agentflows[i].id].push({ + imageSrc, + label: nodes[j].data.label + }) } } } @@ -228,9 +218,15 @@ const Agentflows = () => { - } sx={{ borderRadius: 2, height: 40 }}> + } + sx={{ borderRadius: 2, height: 40 }} + > Add New - + {!view || view === 'card' ? ( <> @@ -257,6 +253,7 @@ const Agentflows = () => { ) : ( { )} )} - - ) diff --git a/packages/ui/src/views/agentflowsv2/AgentFlowEdge.jsx b/packages/ui/src/views/agentflowsv2/AgentFlowEdge.jsx index 56e73908a22..ad34ccf608b 100644 --- a/packages/ui/src/views/agentflowsv2/AgentFlowEdge.jsx +++ b/packages/ui/src/views/agentflowsv2/AgentFlowEdge.jsx @@ -1,6 +1,10 @@ import { EdgeLabelRenderer, getBezierPath } from 'reactflow' -import { memo } from 'react' +import { memo, useState, useContext } from 'react' import PropTypes from 'prop-types' +import { useDispatch } from 'react-redux' +import { SET_DIRTY } from '@/store/actions' +import { flowContext } from '@/store/context/ReactFlowContext' +import { IconX } from '@tabler/icons-react' function EdgeLabel({ transform, isHumanInput, label, color }) { return ( @@ -30,11 +34,23 @@ EdgeLabel.propTypes = { color: PropTypes.string } +const foreignObjectSize = 40 + const AgentFlowEdge = ({ id, sourceX, sourceY, targetX, targetY, sourcePosition, targetPosition, data, markerEnd, selected }) => { + const [isHovered, setIsHovered] = useState(false) + const { deleteEdge } = useContext(flowContext) + const dispatch = useDispatch() + + const onEdgeClick = (evt, id) => { + evt.stopPropagation() + deleteEdge(id) + dispatch({ type: SET_DIRTY }) + } + const xEqual = sourceX === targetX const yEqual = sourceY === targetY - const [edgePath] = getBezierPath({ + const [edgePath, edgeCenterX, edgeCenterY] = getBezierPath({ // we need this little hack in order to display the gradient for a straight line sourceX: xEqual ? sourceX + 0.0001 : sourceX, sourceY: yEqual ? 
sourceY + 0.0001 : sourceY, @@ -63,6 +79,8 @@ const AgentFlowEdge = ({ id, sourceX, sourceY, targetX, targetY, sourcePosition, cursor: 'pointer' }} d={edgePath} + onMouseEnter={() => setIsHovered(true)} + onMouseLeave={() => setIsHovered(false)} /> setIsHovered(true)} + onMouseLeave={() => setIsHovered(false)} /> {data?.edgeLabel && ( @@ -88,6 +108,70 @@ const AgentFlowEdge = ({ id, sourceX, sourceY, targetX, targetY, sourcePosition, /> )} + {isHovered && ( + setIsHovered(true)} + onMouseLeave={() => setIsHovered(false)} + > +
    + +
    +
    + )} ) } diff --git a/packages/ui/src/views/agentflowsv2/AgentFlowNode.jsx b/packages/ui/src/views/agentflowsv2/AgentFlowNode.jsx index c5a71913a74..d52639d42f3 100644 --- a/packages/ui/src/views/agentflowsv2/AgentFlowNode.jsx +++ b/packages/ui/src/views/agentflowsv2/AgentFlowNode.jsx @@ -20,7 +20,8 @@ import { IconCopy, IconTrash, IconInfoCircle, - IconLoader + IconLoader, + IconAlertCircleFilled } from '@tabler/icons-react' import StopCircleIcon from '@mui/icons-material/StopCircle' import CancelIcon from '@mui/icons-material/Cancel' @@ -51,11 +52,13 @@ const StyledNodeToolbar = styled(NodeToolbar)(({ theme }) => ({ const AgentFlowNode = ({ data }) => { const theme = useTheme() const customization = useSelector((state) => state.customization) + const canvas = useSelector((state) => state.canvas) const ref = useRef(null) const updateNodeInternals = useUpdateNodeInternals() // eslint-disable-next-line const [position, setPosition] = useState(0) const [isHovered, setIsHovered] = useState(false) + const [warningMessage, setWarningMessage] = useState('') const { deleteNode, duplicateNode } = useContext(flowContext) const [showInfoDialog, setShowInfoDialog] = useState(false) const [infoDialogProps, setInfoDialogProps] = useState({}) @@ -132,6 +135,28 @@ const AgentFlowNode = ({ data }) => { } }, [data, ref, updateNodeInternals]) + useEffect(() => { + const nodeOutdatedMessage = (oldVersion, newVersion) => + `Node version ${oldVersion} outdated\nUpdate to latest version ${newVersion}` + const nodeVersionEmptyMessage = (newVersion) => `Node outdated\nUpdate to latest version ${newVersion}` + + const componentNode = canvas.componentNodes.find((nd) => nd.name === data.name) + if (componentNode) { + if (!data.version) { + setWarningMessage(nodeVersionEmptyMessage(componentNode.version)) + } else if (data.version && componentNode.version > data.version) { + setWarningMessage(nodeOutdatedMessage(data.version, componentNode.version)) + } else if (componentNode.badge === 'DEPRECATING') { + setWarningMessage( + componentNode?.deprecateMessage ?? + 'This node will be deprecated in the next release. Change to a new node tagged with NEW' + ) + } else { + setWarningMessage('') + } + } + }, [canvas.componentNodes, data.name, data.version]) + return (
    setIsHovered(true)} onMouseLeave={() => setIsHovered(false)}> @@ -236,6 +261,24 @@ const AgentFlowNode = ({ data }) => { )} + {warningMessage && ( + {warningMessage}}> + + + + + )} + {!data.hideInput && ( { return ( - {toolName} - + /> ) }) } else { @@ -404,23 +441,17 @@ const AgentFlowNode = ({ data }) => { return [ - {toolName} - + /> ] } })} diff --git a/packages/ui/src/views/agentflowsv2/Canvas.jsx b/packages/ui/src/views/agentflowsv2/Canvas.jsx index d0b3c6ec70b..3a724666f94 100644 --- a/packages/ui/src/views/agentflowsv2/Canvas.jsx +++ b/packages/ui/src/views/agentflowsv2/Canvas.jsx @@ -42,7 +42,7 @@ import useApi from '@/hooks/useApi' import useConfirm from '@/hooks/useConfirm' // icons -import { IconX, IconRefreshAlert } from '@tabler/icons-react' +import { IconX, IconRefreshAlert, IconMagnetFilled, IconMagnetOff } from '@tabler/icons-react' // utils import { @@ -100,6 +100,7 @@ const AgentflowCanvas = () => { const [isSyncNodesButtonEnabled, setIsSyncNodesButtonEnabled] = useState(false) const [editNodeDialogOpen, setEditNodeDialogOpen] = useState(false) const [editNodeDialogProps, setEditNodeDialogProps] = useState({}) + const [isSnappingEnabled, setIsSnappingEnabled] = useState(false) const reactFlowWrapper = useRef(null) @@ -718,17 +719,30 @@ const AgentflowCanvas = () => { fitView deleteKeyCode={canvas.canvasDialogShow ? null : ['Delete']} minZoom={0.5} + snapGrid={[25, 25]} + snapToGrid={isSnappingEnabled} connectionLineComponent={ConnectionLine} > + > + + { setExpanded(isExpanded) } @@ -133,6 +136,13 @@ export const ConfigInput = ({ data, inputParam, disabled = false, arrayIndex = n componentNodeData.credential = credential ? credential : undefined setSelectedComponentNodeData(componentNodeData) + + // Store the processed inputs to track changes + lastProcessedInputsRef.current = { + mainValue: data.inputs[inputParam.name], + configValue: data.inputs[`${inputParam.name}Config`], + arrayValue: parentParamForArray ? data.inputs[parentParamForArray.name] : null + } } loadComponentData() @@ -140,6 +150,92 @@ export const ConfigInput = ({ data, inputParam, disabled = false, arrayIndex = n // eslint-disable-next-line react-hooks/exhaustive-deps }, []) + // Handle external changes to data.inputs + useEffect(() => { + if (!selectedComponentNodeData.inputParams) return + + // Get current input values + const currentMainValue = data.inputs[inputParam.name] + const currentConfigValue = data.inputs[`${inputParam.name}Config`] + const currentArrayValue = parentParamForArray ? 
data.inputs[parentParamForArray.name] : null + + // Check if relevant inputs have changed + const hasMainValueChanged = lastProcessedInputsRef.current.mainValue !== currentMainValue + const hasConfigValueChanged = lastProcessedInputsRef.current.configValue !== currentConfigValue + const hasArrayValueChanged = lastProcessedInputsRef.current.arrayValue !== currentArrayValue + + if (!hasMainValueChanged && !hasConfigValueChanged && !hasArrayValueChanged) { + return // No relevant changes + } + + // Update selectedComponentNodeData with new input values + const updateComponentData = () => { + const updatedComponentData = cloneDeep(selectedComponentNodeData) + + // Helper functions (same as in initial load) + const hasArrayConfig = () => { + return ( + parentParamForArray && + data.inputs[parentParamForArray.name] && + Array.isArray(data.inputs[parentParamForArray.name]) && + data.inputs[parentParamForArray.name][arrayIndex] && + data.inputs[parentParamForArray.name][arrayIndex][`${inputParam.name}Config`] + ) + } + + const getCurrentInputValue = () => { + return hasArrayConfig() ? data.inputs[parentParamForArray.name][arrayIndex][inputParam.name] : data.inputs[inputParam.name] + } + + const getConfigData = () => { + return hasArrayConfig() + ? data.inputs[parentParamForArray.name][arrayIndex][`${inputParam.name}Config`] + : data.inputs[`${inputParam.name}Config`] + } + + // Update the main input value in component data + const currentValue = getCurrentInputValue() + if (currentValue !== undefined) { + updatedComponentData.inputs[inputParam.name] = currentValue + } + + // If there's config data and it matches the current value, use it + if (hasArrayConfig() || data.inputs[`${inputParam.name}Config`]) { + const configData = getConfigData() + if (configData && configData[inputParam.name] === currentValue) { + // Config is still valid, merge it with current value + updatedComponentData.inputs = { ...configData, [inputParam.name]: currentValue } + } else if (hasMainValueChanged) { + // Main value changed but config doesn't match, reset to defaults with new value + const defaultInput = initializeDefaultNodeData(updatedComponentData.inputParams) + updatedComponentData.inputs = { ...defaultInput, [inputParam.name]: currentValue } + } + } + + // Update input parameters visibility + updatedComponentData.inputParams = showHideInputParams({ + ...updatedComponentData, + inputs: updatedComponentData.inputs + }) + + const credential = updatedComponentData.inputs.credential || updatedComponentData.inputs[FLOWISE_CREDENTIAL_ID] + updatedComponentData.credential = credential ? 
credential : undefined + + setSelectedComponentNodeData(updatedComponentData) + + // Update the tracked values + lastProcessedInputsRef.current = { + mainValue: currentMainValue, + configValue: currentConfigValue, + arrayValue: currentArrayValue + } + } + + updateComponentData() + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [data.inputs, inputParam.name, parentParamForArray?.name, arrayIndex]) + // Update node configuration when selected component data changes useEffect(() => { if (!selectedComponentNodeData.inputs) return @@ -169,7 +265,7 @@ export const ConfigInput = ({ data, inputParam, disabled = false, arrayIndex = n ) // eslint-disable-next-line react-hooks/exhaustive-deps - }, [data.inputs, arrayIndex, parentParamForArray, selectedComponentNodeData]) + }, [selectedComponentNodeData]) return ( <> diff --git a/packages/ui/src/views/agentflowsv2/MarketplaceCanvas.jsx b/packages/ui/src/views/agentflowsv2/MarketplaceCanvas.jsx index 3e4ef254efe..a63cd184ac6 100644 --- a/packages/ui/src/views/agentflowsv2/MarketplaceCanvas.jsx +++ b/packages/ui/src/views/agentflowsv2/MarketplaceCanvas.jsx @@ -4,6 +4,7 @@ import 'reactflow/dist/style.css' import '@/views/canvas/index.css' import { useLocation, useNavigate } from 'react-router-dom' +import { useSelector } from 'react-redux' // material-ui import { Toolbar, Box, AppBar } from '@mui/material' @@ -18,6 +19,9 @@ import StickyNote from './StickyNote' import EditNodeDialog from '@/views/agentflowsv2/EditNodeDialog' import { flowContext } from '@/store/context/ReactFlowContext' +// icons +import { IconMagnetFilled, IconMagnetOff } from '@tabler/icons-react' + const nodeTypes = { agentFlow: AgentFlowNode, stickyNote: StickyNote, iteration: IterationNode } const edgeTypes = { agentFlow: AgentFlowEdge } @@ -26,6 +30,7 @@ const edgeTypes = { agentFlow: AgentFlowEdge } const MarketplaceCanvasV2 = () => { const theme = useTheme() const navigate = useNavigate() + const customization = useSelector((state) => state.customization) const { state } = useLocation() const { flowData, name } = state @@ -36,6 +41,7 @@ const MarketplaceCanvasV2 = () => { const [edges, setEdges, onEdgesChange] = useEdgesState() const [editNodeDialogOpen, setEditNodeDialogOpen] = useState(false) const [editNodeDialogProps, setEditNodeDialogProps] = useState({}) + const [isSnappingEnabled, setIsSnappingEnabled] = useState(false) const reactFlowWrapper = useRef(null) const { setReactFlowInstance } = useContext(flowContext) @@ -108,15 +114,29 @@ const MarketplaceCanvasV2 = () => { edgeTypes={edgeTypes} fitView minZoom={0.1} + snapGrid={[25, 25]} + snapToGrid={isSnappingEnabled} > + > + + {moment(props.apiKey.createdAt).format('MMMM Do, YYYY')} - - - - - - - - - - + + + + + + + + + + + + + + {open && ( @@ -199,12 +205,12 @@ const APIKey = () => { const dispatch = useDispatch() useNotifier() + const { error, setError } = useError() const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) const [isLoading, setLoading] = useState(true) - const [error, setError] = useState(null) const [showDialog, setShowDialog] = useState(false) const [dialogProps, setDialogProps] = useState({}) const [apiKeys, setAPIKeys] = useState([]) @@ -354,12 +360,6 @@ const APIKey = () => { } }, [getAllAPIKeysApi.data]) - useEffect(() => { - if (getAllAPIKeysApi.error) { - setError(getAllAPIKeysApi.error) - } - }, [getAllAPIKeysApi.error]) - return ( <> @@ -374,7 +374,8 @@ const APIKey = () => { 
title='API Keys' description='Flowise API & SDK authentication keys' > - - + { id='btn_createApiKey' > Create Key - + {!isLoading && apiKeys.length <= 0 ? ( @@ -422,9 +424,13 @@ const APIKey = () => { Key Name API Key Usage - Created - - + Updated + + + + + + @@ -443,12 +449,12 @@ const APIKey = () => { - - - - - - + + + + + + @@ -463,12 +469,12 @@ const APIKey = () => { - - - - - - + + + + + + ) : ( diff --git a/packages/ui/src/views/assistants/custom/CustomAssistantConfigurePreview.jsx b/packages/ui/src/views/assistants/custom/CustomAssistantConfigurePreview.jsx index b9b4326ac6a..8d97ddf0e79 100644 --- a/packages/ui/src/views/assistants/custom/CustomAssistantConfigurePreview.jsx +++ b/packages/ui/src/views/assistants/custom/CustomAssistantConfigurePreview.jsx @@ -39,6 +39,7 @@ import ViewLeadsDialog from '@/ui-component/dialog/ViewLeadsDialog' import Settings from '@/views/settings' import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' import PromptGeneratorDialog from '@/ui-component/dialog/PromptGeneratorDialog' +import { Available } from '@/ui-component/rbac/available' import ExpandTextDialog from '@/ui-component/dialog/ExpandTextDialog' // API @@ -866,68 +867,72 @@ const CustomAssistantConfigurePreview = () => { )} - - - - - - {customAssistantFlowId && !loadingAssistant && ( - + + setSettingsOpen(!isSettingsOpen)} + color='inherit' + onClick={onSaveAndProcess} > - + - )} - {!customAssistantFlowId && !loadingAssistant && ( - + + {customAssistantFlowId && !loadingAssistant && ( + setSettingsOpen(!isSettingsOpen)} > - + )} + {!customAssistantFlowId && !loadingAssistant && ( + + + + + + + + )} { {selectedChatModel && Object.keys(selectedChatModel).length > 0 && ( - + + + )}
    diff --git a/packages/ui/src/views/assistants/custom/CustomAssistantLayout.jsx b/packages/ui/src/views/assistants/custom/CustomAssistantLayout.jsx index 2e21d6edfea..fcdb4f2b891 100644 --- a/packages/ui/src/views/assistants/custom/CustomAssistantLayout.jsx +++ b/packages/ui/src/views/assistants/custom/CustomAssistantLayout.jsx @@ -10,9 +10,9 @@ import MainCard from '@/ui-component/cards/MainCard' import ItemCard from '@/ui-component/cards/ItemCard' import { baseURL, gridSpacing } from '@/store/constant' import AssistantEmptySVG from '@/assets/images/assistant_empty.svg' -import { StyledButton } from '@/ui-component/button/StyledButton' import AddCustomAssistantDialog from './AddCustomAssistantDialog' import ErrorBoundary from '@/ErrorBoundary' +import { StyledPermissionButton } from '@/ui-component/button/RBACButtons' // API import assistantsApi from '@/api/assistants' @@ -101,14 +101,15 @@ const CustomAssistantLayout = () => { description='Create custom assistants with your choice of LLMs' onBack={() => navigate(-1)} > - } > Add - + {isLoading ? ( diff --git a/packages/ui/src/views/assistants/openai/AssistantDialog.jsx b/packages/ui/src/views/assistants/openai/AssistantDialog.jsx index cd6722ef91b..7b2b3041cc9 100644 --- a/packages/ui/src/views/assistants/openai/AssistantDialog.jsx +++ b/packages/ui/src/views/assistants/openai/AssistantDialog.jsx @@ -21,7 +21,6 @@ import { OutlinedInput } from '@mui/material' -import { StyledButton } from '@/ui-component/button/StyledButton' import { TooltipWithParser } from '@/ui-component/tooltip/TooltipWithParser' import { Dropdown } from '@/ui-component/dropdown/Dropdown' import { MultiDropdown } from '@/ui-component/dropdown/MultiDropdown' @@ -30,6 +29,7 @@ import { File } from '@/ui-component/file/File' import { BackdropLoader } from '@/ui-component/loading/BackdropLoader' import DeleteConfirmDialog from './DeleteConfirmDialog' import AssistantVectorStoreDialog from './AssistantVectorStoreDialog' +import { StyledPermissionButton } from '@/ui-component/button/RBACButtons' // Icons import { IconX, IconPlus } from '@tabler/icons-react' @@ -205,6 +205,7 @@ const AssistantDialog = ({ show, dialogProps, onCancel, onConfirm, setError }) = useEffect(() => { if (getSpecificAssistantApi.error) { + const error = getSpecificAssistantApi.error let errMsg = '' if (error?.response?.data) { errMsg = typeof error.response.data === 'object' ? error.response.data.message : error.response.data @@ -1035,22 +1036,33 @@ const AssistantDialog = ({ show, dialogProps, onCancel, onConfirm, setError }) = {dialogProps.type === 'EDIT' && ( - onSyncClick()}> + onSyncClick()} + > Sync - + )} {dialogProps.type === 'EDIT' && ( - onDeleteClick()}> + onDeleteClick()} + > Delete - + )} - (dialogProps.type === 'ADD' ? addNewAssistant() : saveAssistant())} > {dialogProps.confirmButtonName} - + { description='Create assistants using OpenAI Assistant API' onBack={() => navigate(-1)} > - - + } > Add - + {isLoading ? 
( diff --git a/packages/ui/src/views/auth/expired.jsx b/packages/ui/src/views/auth/expired.jsx new file mode 100644 index 00000000000..6f893d5d666 --- /dev/null +++ b/packages/ui/src/views/auth/expired.jsx @@ -0,0 +1,46 @@ +import MainCard from '@/ui-component/cards/MainCard' +import { Box, Stack, Typography } from '@mui/material' +import contactSupport from '@/assets/images/contact_support.svg' +import { StyledButton } from '@/ui-component/button/StyledButton' + +// ==============================|| License Expired Page ||============================== // + +const LicenseExpired = () => { + return ( + <> + + + + + contact support + + + Your enterprise license has expired + + + Please contact our support team to renew your license. + + + Contact Support + + + + + + ) +} + +export default LicenseExpired diff --git a/packages/ui/src/views/auth/forgotPassword.jsx b/packages/ui/src/views/auth/forgotPassword.jsx new file mode 100644 index 00000000000..9e17f243617 --- /dev/null +++ b/packages/ui/src/views/auth/forgotPassword.jsx @@ -0,0 +1,146 @@ +import { useEffect, useState } from 'react' +import { Link } from 'react-router-dom' + +// material-ui +import { Alert, Box, Stack, Typography, useTheme } from '@mui/material' + +// project imports +import { StyledButton } from '@/ui-component/button/StyledButton' +import MainCard from '@/ui-component/cards/MainCard' +import { Input } from '@/ui-component/input/Input' +import { BackdropLoader } from '@/ui-component/loading/BackdropLoader' + +// API +import accountApi from '@/api/account.api' + +// Hooks +import useApi from '@/hooks/useApi' +import { useConfig } from '@/store/context/ConfigContext' + +// utils +import useNotifier from '@/utils/useNotifier' + +// Icons +import { IconCircleCheck, IconExclamationCircle } from '@tabler/icons-react' + +// ==============================|| ForgotPasswordPage ||============================== // + +const ForgotPasswordPage = () => { + const theme = useTheme() + useNotifier() + + const usernameInput = { + label: 'Username', + name: 'username', + type: 'email', + placeholder: 'user@company.com' + } + const [usernameVal, setUsernameVal] = useState('') + const { isEnterpriseLicensed } = useConfig() + + const [isLoading, setLoading] = useState(false) + const [responseMsg, setResponseMsg] = useState(undefined) + + const forgotPasswordApi = useApi(accountApi.forgotPassword) + + const sendResetRequest = async (event) => { + event.preventDefault() + const body = { + user: { + email: usernameVal + } + } + setLoading(true) + await forgotPasswordApi.request(body) + } + + useEffect(() => { + if (forgotPasswordApi.error) { + const errMessage = + typeof forgotPasswordApi.error.response.data === 'object' + ? forgotPasswordApi.error.response.data.message + : forgotPasswordApi.error.response.data + setResponseMsg({ + type: 'error', + msg: errMessage ?? 'Failed to send instructions, please contact your administrator.' + }) + setLoading(false) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [forgotPasswordApi.error]) + + useEffect(() => { + if (forgotPasswordApi.data) { + setResponseMsg({ + type: 'success', + msg: 'Password reset instructions sent to the email.' 
+ }) + setLoading(false) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [forgotPasswordApi.data]) + + return ( + <> + + + {responseMsg && responseMsg?.type === 'error' && ( + } variant='filled' severity='error'> + {responseMsg.msg} + + )} + {responseMsg && responseMsg?.type !== 'error' && ( + } variant='filled' severity='success'> + {responseMsg.msg} + + )} + + Forgot Password? + + Have a reset password code?{' '} + + Change your password here + + . + + +
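Every view in this part of the patch drives its requests through `useApi(...)` and reacts to `.data`, `.error`, and `.loading` in separate effects, as ForgotPasswordPage does above. The hook itself is not part of this diff; purely as a hedged sketch, a hook with that surface could look roughly like this:

```js
import { useState } from 'react'

// Hedged sketch only: the real hook lives in packages/ui/src/hooks/useApi and is
// not shown in this patch. This merely illustrates the { data, error, loading, request }
// surface the views above depend on.
function useApiSketch(apiFunc) {
    const [data, setData] = useState(null)
    const [error, setError] = useState(null)
    const [loading, setLoading] = useState(false)

    const request = async (...args) => {
        setLoading(true)
        setError(null)
        try {
            const response = await apiFunc(...args)
            setData(response.data)
            return response.data
        } catch (err) {
            // Views above read err.response.data, so the raw axios-style error is kept
            setError(err)
        } finally {
            setLoading(false)
        }
    }

    return { data, error, loading, request }
}
```

A component would then call `const api = useApiSketch(accountApi.forgotPassword)` and branch on `api.data` / `api.error` exactly as the effects above do.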
    + + +
    + + Email * + + +
    +
    + setUsernameVal(newValue)} + value={usernameVal} + showDialog={false} + /> + {isEnterpriseLicensed && ( + + If you forgot the email you used for signing up, please contact your administrator. + + )} +
    + + Send Reset Password Instructions + +
    +
    + +
    +
    + + ) +} + +export default ForgotPasswordPage diff --git a/packages/ui/src/views/auth/login.jsx b/packages/ui/src/views/auth/login.jsx new file mode 100644 index 00000000000..54eb7a83370 --- /dev/null +++ b/packages/ui/src/views/auth/login.jsx @@ -0,0 +1,45 @@ +import { useEffect, useState } from 'react' + +// material-ui +import { BackdropLoader } from '@/ui-component/loading/BackdropLoader' + +// project imports +import MainCard from '@/ui-component/cards/MainCard' + +// API +import authApi from '@/api/auth' + +// Hooks +import useApi from '@/hooks/useApi' + +// ==============================|| ResolveLoginPage ||============================== // + +const ResolveLoginPage = () => { + const resolveLogin = useApi(authApi.resolveLogin) + const [loading, setLoading] = useState(false) + + useEffect(() => { + setLoading(false) + }, [resolveLogin.error]) + + useEffect(() => { + resolveLogin.request({}) + setLoading(true) + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []) + + useEffect(() => { + setLoading(false) + if (resolveLogin.data) { + window.location.href = resolveLogin.data.redirectUrl + } + }, [resolveLogin.data]) + + return ( + <> + {loading && } + + ) +} + +export default ResolveLoginPage diff --git a/packages/ui/src/views/auth/loginActivity.jsx b/packages/ui/src/views/auth/loginActivity.jsx new file mode 100644 index 00000000000..2e01078e8f6 --- /dev/null +++ b/packages/ui/src/views/auth/loginActivity.jsx @@ -0,0 +1,638 @@ +import { forwardRef, useEffect, useState } from 'react' +import { useDispatch, useSelector } from 'react-redux' +import moment from 'moment/moment' +import PropTypes from 'prop-types' + +// material-ui +import { + Box, + Skeleton, + Stack, + Table, + TableBody, + TableContainer, + TableHead, + TableRow, + Paper, + IconButton, + useTheme, + Checkbox, + Button, + OutlinedInput, + MenuItem, + Select, + InputLabel, + FormControl, + ListItemText, + ListItemButton +} from '@mui/material' + +// project imports +import MainCard from '@/ui-component/cards/MainCard' +import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' +import ViewHeader from '@/layout/MainLayout/ViewHeader' +import ErrorBoundary from '@/ErrorBoundary' +import { StyledTableCell, StyledTableRow } from '@/ui-component/table/TableStyles' +import DatePicker from 'react-datepicker' +import 'react-datepicker/dist/react-datepicker.css' + +// API +import auditApi from '@/api/audit' + +// Hooks +import useApi from '@/hooks/useApi' +import useConfirm from '@/hooks/useConfirm' + +// utils +import useNotifier from '@/utils/useNotifier' + +// Icons +import { IconCircleX, IconChevronLeft, IconChevronRight, IconTrash, IconX, IconLogin, IconLogout } from '@tabler/icons-react' + +// store +import { useError } from '@/store/context/ErrorContext' +import { enqueueSnackbar as enqueueSnackbarAction, closeSnackbar as closeSnackbarAction } from '@/store/actions' +import { PermissionButton } from '@/ui-component/button/RBACButtons' + +const activityTypes = [ + 'Login Success', + 'Logout Success', + 'Unknown User', + 'Incorrect Credential', + 'User Disabled', + 'No Assigned Workspace', + 'Unknown Activity' +] +const MenuProps = { + PaperProps: { + style: { + width: 160 + } + } +} +const SelectStyles = { + '& .MuiOutlinedInput-notchedOutline': { + borderRadius: 2 + } +} + +// ==============================|| Login Activity ||============================== // + +const DatePickerCustomInput = forwardRef(function DatePickerCustomInput({ value, onClick }, ref) { + return ( + + {value} + + ) +}) + 
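LoginActivity wraps its date-picker trigger in `forwardRef` (DatePickerCustomInput above) because react-datepicker attaches a ref to whatever is passed as `customInput`. A stripped-down sketch of that pattern, with the styling omitted:

```jsx
import { forwardRef } from 'react'
import PropTypes from 'prop-types'

// react-datepicker needs a ref on the custom trigger to position its popper,
// hence the forwardRef wrapper (mirroring DatePickerCustomInput above).
const DateButton = forwardRef(function DateButton({ value, onClick }, ref) {
    return (
        <button type='button' ref={ref} onClick={onClick}>
            {value}
        </button>
    )
})

DateButton.propTypes = {
    value: PropTypes.string,
    onClick: PropTypes.func
}

// Usage: <DatePicker selected={startDate} onChange={setStartDate} customInput={<DateButton />} />
```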
+DatePickerCustomInput.propTypes = { + value: PropTypes.string, + onClick: PropTypes.func +} +const LoginActivity = () => { + const theme = useTheme() + const customization = useSelector((state) => state.customization) + const dispatch = useDispatch() + useNotifier() + const { error, setError } = useError() + + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const [isLoading, setLoading] = useState(true) + + const { confirm } = useConfirm() + + const getLoginActivityApi = useApi(auditApi.fetchLoginActivity) + const [activity, setActivity] = useState([]) + const [typeFilter, setTypeFilter] = useState([]) + const [totalRecords, setTotalRecords] = useState(0) + const [currentPage, setCurrentPage] = useState(1) + const [start, setStart] = useState(1) + const [end, setEnd] = useState(50) + const [startDate, setStartDate] = useState(new Date(new Date().setMonth(new Date().getMonth() - 1))) + const [endDate, setEndDate] = useState(new Date()) + + const [selected, setSelected] = useState([]) + + const onStartDateSelected = (date) => { + setStartDate(date) + refreshData(currentPage, date, endDate, typeFilter) + } + + const onEndDateSelected = (date) => { + setEndDate(date) + refreshData(currentPage, startDate, date, typeFilter) + } + + const onSelectAllClick = (event) => { + if (event.target.checked) { + const newSelected = activity.map((n) => n.id) + setSelected(newSelected) + return + } + setSelected([]) + } + + const handleSelect = (event, id) => { + const selectedIndex = selected.indexOf(id) + let newSelected = [] + + if (selectedIndex === -1) { + newSelected = newSelected.concat(selected, id) + } else if (selectedIndex === 0) { + newSelected = newSelected.concat(selected.slice(1)) + } else if (selectedIndex === selected.length - 1) { + newSelected = newSelected.concat(selected.slice(0, -1)) + } else if (selectedIndex > 0) { + newSelected = newSelected.concat(selected.slice(0, selectedIndex), selected.slice(selectedIndex + 1)) + } + setSelected(newSelected) + } + + const refreshData = (_page, _start, _end, _filter) => { + const activityCodes = [] + if (_filter.length > 0) { + _filter.forEach((type) => { + activityCodes.push(getActivityCode(type)) + }) + } + getLoginActivityApi.request({ + pageNo: _page, + startDate: _start, + endDate: _end, + activityCodes: activityCodes + }) + } + + const changePage = (newPage) => { + setLoading(true) + setCurrentPage(newPage) + refreshData(newPage, startDate, endDate, typeFilter) + } + + const handleTypeFilterChange = (event) => { + const { + target: { value } + } = event + let newVar = typeof value === 'string' ? 
value.split(',') : value + setTypeFilter(newVar) + refreshData(currentPage, startDate, endDate, newVar) + } + + function getActivityDescription(activityCode) { + switch (activityCode) { + case 0: + return 'Login Success' + case 1: + return 'Logout Success' + case -1: + return 'Unknown User' + case -2: + return 'Incorrect Credential' + case -3: + return 'User Disabled' + case -4: + return 'No Assigned Workspace' + default: + return 'Unknown Activity' + } + } + + function getActivityCode(activityDescription) { + switch (activityDescription) { + case 'Login Success': + return 0 + case 'Logout Success': + return 1 + case 'Unknown User': + return -1 + case 'Incorrect Credential': + return -2 + case 'User Disabled': + return -3 + case 'No Assigned Workspace': + return -4 + default: + return -99 + } + } + + const deleteLoginActivity = async () => { + const confirmPayload = { + title: `Delete`, + description: `Delete ${selected.length} ${selected.length > 1 ? 'records' : 'record'}? `, + confirmButtonName: 'Delete', + cancelButtonName: 'Cancel' + } + const isConfirmed = await confirm(confirmPayload) + // + if (isConfirmed) { + try { + const deleteResp = await auditApi.deleteLoginActivity({ + selected: selected + }) + if (deleteResp.data) { + enqueueSnackbar({ + message: selected.length + ' Login Activity Records Deleted Successfully', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + onConfirm() + } + } catch (error) { + enqueueSnackbar({ + message: `Failed to delete records: ${ + typeof error.response.data === 'object' ? error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } + } + } + + const onConfirm = () => { + getLoginActivityApi.request() + } + + useEffect(() => { + getLoginActivityApi.request({ + pageNo: 1 + }) + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []) + + useEffect(() => { + setLoading(getLoginActivityApi.loading) + }, [getLoginActivityApi.loading]) + + useEffect(() => { + if (getLoginActivityApi.error) { + setError(getLoginActivityApi.error) + } + }, [getLoginActivityApi.error, setError]) + + useEffect(() => { + if (getLoginActivityApi.data) { + const data = getLoginActivityApi.data + setTotalRecords(data.count) + setLoading(false) + setCurrentPage(data.currentPage) + setStart(data.currentPage * data.pageSize - (data.pageSize - 1)) + setEnd(data.currentPage * data.pageSize > data.count ? data.count : data.currentPage * data.pageSize) + setActivity(data.data) + setSelected([]) + } + }, [getLoginActivityApi.data]) + + return ( + <> + + {error ? ( + + ) : ( + + + + +
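getActivityDescription and getActivityCode above are two hand-mirrored switch statements; if a new activity code is ever added, both must be edited in step. As a design note, a sketch of deriving both directions from one lookup table (the codes below are copied from the switches above):

```js
// Single source of truth for login-activity codes; both lookup directions are
// derived from it. Values are taken from the switch statements above.
const ACTIVITY_CODES = {
    0: 'Login Success',
    1: 'Logout Success',
    '-1': 'Unknown User',
    '-2': 'Incorrect Credential',
    '-3': 'User Disabled',
    '-4': 'No Assigned Workspace'
}

const getActivityDescription = (code) => ACTIVITY_CODES[code] ?? 'Unknown Activity'

const getActivityCode = (description) => {
    const match = Object.entries(ACTIVITY_CODES).find(([, label]) => label === description)
    return match ? Number(match[0]) : -99
}

// getActivityDescription(-2) -> 'Incorrect Credential'
// getActivityCode('Logout Success') -> 1
```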
    +
    +
    + From: + onStartDateSelected(date)} + selectsStart + startDate={startDate} + endDate={endDate} + customInput={} + /> +
    +
    + To: + onEndDateSelected(date)} + selectsEnd + startDate={startDate} + endDate={endDate} + minDate={startDate} + maxDate={new Date()} + customInput={} + /> +
    +
    + + + Filter By + + + +
    +
    +
    +
    + changePage(currentPage - 1)} + style={{ marginRight: 10 }} + variant='outlined' + disabled={currentPage === 1} + > + + + Showing {Math.min(start, totalRecords)}-{end} of {totalRecords} Records + changePage(currentPage + 1)} + style={{ marginLeft: 10 }} + variant='outlined' + disabled={end >= totalRecords} + > + = totalRecords + ? '#616161' + : 'white' + : end >= totalRecords + ? '#e0e0e0' + : 'black' + } + /> + +
    + } + > + {'Delete Selected'} + +
    +
    + + + + + + + + Activity + User + Date + Method + Message + + + + {isLoading ? ( + <> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ) : ( + <> + {activity.map((item, index) => ( + + + handleSelect(event, item.id)} + /> + + +
    +
    + {item.activityCode === 0 && ( + + )} + {item.activityCode === 1 && ( + + )} + {item.activityCode < 0 && ( + + )} +
    +
    {getActivityDescription(item.activityCode)}
    +
    +
    + {item.username} + + {moment(item.attemptedDateTime).format('MMMM Do, YYYY, HH:mm')} + + + {item.loginMode ? item.loginMode : 'Email/Password'} + + {item.message} +
    + ))} + + )} +
    +
    +
    +
    +
    +
    + )} +
    + + + ) +} + +export default LoginActivity diff --git a/packages/ui/src/views/auth/register.jsx b/packages/ui/src/views/auth/register.jsx new file mode 100644 index 00000000000..f7352056d17 --- /dev/null +++ b/packages/ui/src/views/auth/register.jsx @@ -0,0 +1,472 @@ +import { useEffect, useState } from 'react' +import { Link, useNavigate, useSearchParams } from 'react-router-dom' +import { z } from 'zod' + +// material-ui +import { Alert, Box, Button, Divider, Icon, List, ListItemText, OutlinedInput, Stack, Typography, useTheme } from '@mui/material' + +// project imports +import { StyledButton } from '@/ui-component/button/StyledButton' +import { Input } from '@/ui-component/input/Input' +import { BackdropLoader } from '@/ui-component/loading/BackdropLoader' + +// API +import accountApi from '@/api/account.api' +import loginMethodApi from '@/api/loginmethod' +import ssoApi from '@/api/sso' + +// Hooks +import useApi from '@/hooks/useApi' +import { useConfig } from '@/store/context/ConfigContext' + +// utils +import useNotifier from '@/utils/useNotifier' +import { passwordSchema } from '@/utils/validation' + +// Icons +import Auth0SSOLoginIcon from '@/assets/images/auth0.svg' +import GithubSSOLoginIcon from '@/assets/images/github.svg' +import GoogleSSOLoginIcon from '@/assets/images/google.svg' +import AzureSSOLoginIcon from '@/assets/images/microsoft-azure.svg' +import { store } from '@/store' +import { loginSuccess } from '@/store/reducers/authSlice' +import { IconCircleCheck, IconExclamationCircle } from '@tabler/icons-react' + +// ==============================|| Register ||============================== // + +// IMPORTANT: when updating this schema, update the schema on the server as well +// packages/server/src/enterprise/Interface.Enterprise.ts +const RegisterEnterpriseUserSchema = z + .object({ + username: z.string().min(1, 'Name is required'), + email: z.string().min(1, 'Email is required').email('Invalid email address'), + password: passwordSchema, + confirmPassword: z.string().min(1, 'Confirm Password is required'), + token: z.string().min(1, 'Invite Code is required') + }) + .refine((data) => data.password === data.confirmPassword, { + message: "Passwords don't match", + path: ['confirmPassword'] + }) + +const RegisterCloudUserSchema = z + .object({ + username: z.string().min(1, 'Name is required'), + email: z.string().min(1, 'Email is required').email('Invalid email address'), + password: passwordSchema, + confirmPassword: z.string().min(1, 'Confirm Password is required') + }) + .refine((data) => data.password === data.confirmPassword, { + message: "Passwords don't match", + path: ['confirmPassword'] + }) + +const RegisterPage = () => { + const theme = useTheme() + useNotifier() + const { isEnterpriseLicensed, isCloud, isOpenSource } = useConfig() + + const usernameInput = { + label: 'Username', + name: 'username', + type: 'text', + placeholder: 'John Doe' + } + + const passwordInput = { + label: 'Password', + name: 'password', + type: 'password', + placeholder: '********' + } + + const confirmPasswordInput = { + label: 'Confirm Password', + name: 'confirmPassword', + type: 'password', + placeholder: '********' + } + + const emailInput = { + label: 'EMail', + name: 'email', + type: 'email', + placeholder: 'user@company.com' + } + + const inviteCodeInput = { + label: 'Invite Code', + name: 'inviteCode', + type: 'text' + } + + const [params] = useSearchParams() + + const [email, setEmail] = useState('') + const [password, setPassword] = useState('') + const 
[confirmPassword, setConfirmPassword] = useState('') + const [token, setToken] = useState(params.get('token') ?? '') + const [username, setUsername] = useState('') + const [configuredSsoProviders, setConfiguredSsoProviders] = useState([]) + + const [loading, setLoading] = useState(false) + const [authError, setAuthError] = useState('') + const [successMsg, setSuccessMsg] = useState(undefined) + + const registerApi = useApi(accountApi.registerAccount) + const ssoLoginApi = useApi(ssoApi.ssoLogin) + const getDefaultProvidersApi = useApi(loginMethodApi.getDefaultLoginMethods) + const navigate = useNavigate() + + const register = async (event) => { + event.preventDefault() + if (isEnterpriseLicensed) { + const result = RegisterEnterpriseUserSchema.safeParse({ + username, + email, + token, + password, + confirmPassword + }) + if (result.success) { + setLoading(true) + const body = { + user: { + name: username, + email, + credential: password, + tempToken: token + } + } + await registerApi.request(body) + } else { + const errorMessages = result.error.errors.map((err) => err.message) + setAuthError(errorMessages.join(', ')) + } + } else if (isCloud) { + const formData = new FormData(event.target) + const referral = formData.get('referral') + const result = RegisterCloudUserSchema.safeParse({ + username, + email, + password, + confirmPassword + }) + if (result.success) { + setLoading(true) + const body = { + user: { + name: username, + email, + credential: password + } + } + if (referral) { + body.user.referral = referral + } + await registerApi.request(body) + } else { + const errorMessages = result.error.errors.map((err) => err.message) + setAuthError(errorMessages.join(', ')) + } + } + } + + const signInWithSSO = (ssoProvider) => { + //ssoLoginApi.request(ssoProvider) + window.location.href = `/api/v1/${ssoProvider}/login` + } + + useEffect(() => { + if (registerApi.error) { + if (isEnterpriseLicensed) { + setAuthError( + `Error in registering user. Please contact your administrator. (${registerApi.error?.response?.data?.message})` + ) + } else if (isCloud) { + setAuthError(`Error in registering user. 
Please try again.`) + } + setLoading(false) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [registerApi.error]) + + useEffect(() => { + if (!isOpenSource) { + getDefaultProvidersApi.request() + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []) + + useEffect(() => { + if (ssoLoginApi.data) { + store.dispatch(loginSuccess(ssoLoginApi.data)) + navigate(location.state?.path || '/chatflows') + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [ssoLoginApi.data]) + + useEffect(() => { + if (ssoLoginApi.error) { + if (ssoLoginApi.error?.response?.status === 401 && ssoLoginApi.error?.response?.data.redirectUrl) { + window.location.href = ssoLoginApi.error.response.data.redirectUrl + } else { + setAuthError(ssoLoginApi.error.message) + } + } + }, [ssoLoginApi.error]) + + useEffect(() => { + if (getDefaultProvidersApi.data && getDefaultProvidersApi.data.providers) { + //data is an array of objects, store only the provider attribute + setConfiguredSsoProviders(getDefaultProvidersApi.data.providers.map((provider) => provider)) + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getDefaultProvidersApi.data]) + + useEffect(() => { + if (registerApi.data) { + setLoading(false) + setAuthError(undefined) + setConfirmPassword('') + setPassword('') + setToken('') + setUsername('') + setEmail('') + if (isEnterpriseLicensed) { + setSuccessMsg('Registration Successful. You will be redirected to the sign in page shortly.') + } else if (isCloud) { + setSuccessMsg('To complete your registration, please click on the verification link we sent to your email address') + } + setTimeout(() => { + navigate('/signin') + }, 3000) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [registerApi.data]) + + return ( + <> + + + {authError && ( + } variant='filled' severity='error'> + {authError.split(', ').length > 0 ? ( + + {authError.split(', ').map((error, index) => ( + + ))} + + ) : ( + authError + )} + + )} + {successMsg && ( + } variant='filled' severity='success'> + {successMsg} + + )} + + Sign Up + + Already have an account?{' '} + + Sign In + + . + + +
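The register handler above runs `RegisterEnterpriseUserSchema.safeParse(...)` (or the cloud variant) and joins the resulting messages into a single `authError` string. A condensed, standalone sketch of that validation flow; `passwordSchema` is simplified here because its real definition lives in `@/utils/validation` and is not part of this diff:

```js
import { z } from 'zod'

// Simplified stand-in for the imported passwordSchema (the real rules are in
// @/utils/validation and are not shown in this patch).
const passwordSchema = z.string().min(8, 'Password must be at least 8 characters long')

const RegisterUserSchema = z
    .object({
        username: z.string().min(1, 'Name is required'),
        email: z.string().min(1, 'Email is required').email('Invalid email address'),
        password: passwordSchema,
        confirmPassword: z.string().min(1, 'Confirm Password is required')
    })
    .refine((data) => data.password === data.confirmPassword, {
        message: "Passwords don't match",
        path: ['confirmPassword']
    })

const result = RegisterUserSchema.safeParse({
    username: 'John Doe',
    email: 'user@company.com',
    password: 'Secret123!',
    confirmPassword: 'Secret123'
})

if (!result.success) {
    // Mirrors setAuthError(errorMessages.join(', ')) in the handler above
    console.log(result.error.errors.map((err) => err.message).join(', '))
    // -> "Passwords don't match"
}
```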
    + + +
    + + Full Name * + +
    +
+ setUsername(newValue)} + value={username} + showDialog={false} + /> + + Used for display purposes only. + +
    + +
    + + Email * + +
    +
+ setEmail(newValue)} + value={email} + showDialog={false} + /> + + Please use a valid email address. It will be used as your login ID. + +
    + {isEnterpriseLicensed && ( + +
    + + Invite Code * + +
    +
+ setToken(e.target.value)} + value={token} + /> + + Please copy the invite code you received in your email. + +
    + )} + +
    + + Password * + +
    +
    + setPassword(newValue)} value={password} /> + + + Password must be at least 8 characters long and contain at least one lowercase letter, one uppercase + letter, one digit, and one special character (@$!%*?&-). + + +
    + +
    + + Confirm Password * + +
    +
    + setConfirmPassword(newValue)} + value={confirmPassword} + /> + + Confirm your password. Must match the password typed above. + +
    + + Create Account + + {configuredSsoProviders.length > 0 && OR} + {configuredSsoProviders && + configuredSsoProviders.map( + (ssoProvider) => + //https://learn.microsoft.com/en-us/entra/identity-platform/howto-add-branding-in-apps + ssoProvider === 'azure' && ( + + ) + )} + {configuredSsoProviders && + configuredSsoProviders.map( + (ssoProvider) => + ssoProvider === 'google' && ( + + ) + )} + {configuredSsoProviders && + configuredSsoProviders.map( + (ssoProvider) => + ssoProvider === 'auth0' && ( + + ) + )} + {configuredSsoProviders && + configuredSsoProviders.map( + (ssoProvider) => + ssoProvider === 'github' && ( + + ) + )} +
    +
    +
    +
    + {loading && } + + ) +} + +export default RegisterPage diff --git a/packages/ui/src/views/auth/resetPassword.jsx b/packages/ui/src/views/auth/resetPassword.jsx new file mode 100644 index 00000000000..685b32663cd --- /dev/null +++ b/packages/ui/src/views/auth/resetPassword.jsx @@ -0,0 +1,257 @@ +import { useState } from 'react' +import { useDispatch } from 'react-redux' +import { Link, useNavigate, useSearchParams } from 'react-router-dom' + +// material-ui +import { Alert, Box, Button, OutlinedInput, Stack, Typography, useTheme } from '@mui/material' + +// project imports +import { closeSnackbar as closeSnackbarAction, enqueueSnackbar as enqueueSnackbarAction } from '@/store/actions' +import { StyledButton } from '@/ui-component/button/StyledButton' +import MainCard from '@/ui-component/cards/MainCard' +import { Input } from '@/ui-component/input/Input' +import { BackdropLoader } from '@/ui-component/loading/BackdropLoader' + +// API +import accountApi from '@/api/account.api' + +// utils +import useNotifier from '@/utils/useNotifier' +import { validatePassword } from '@/utils/validation' + +// Icons +import { IconExclamationCircle, IconX } from '@tabler/icons-react' + +// ==============================|| ResetPasswordPage ||============================== // + +const ResetPasswordPage = () => { + const theme = useTheme() + useNotifier() + const navigate = useNavigate() + const dispatch = useDispatch() + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const emailInput = { + label: 'Email', + name: 'email', + type: 'email', + placeholder: 'user@company.com' + } + + const passwordInput = { + label: 'Password', + name: 'password', + type: 'password', + placeholder: '********' + } + + const confirmPasswordInput = { + label: 'Confirm Password', + name: 'confirmPassword', + type: 'password', + placeholder: '********' + } + + const resetPasswordInput = { + label: 'Reset Token', + name: 'resetToken', + type: 'text' + } + + const [params] = useSearchParams() + const token = params.get('token') + + const [emailVal, setEmailVal] = useState('') + const [newPasswordVal, setNewPasswordVal] = useState('') + const [confirmPasswordVal, setConfirmPasswordVal] = useState('') + const [tokenVal, setTokenVal] = useState(token ?? 
'') + + const [loading, setLoading] = useState(false) + const [authErrors, setAuthErrors] = useState([]) + + const goLogin = () => { + navigate('/signin', { replace: true }) + } + + const validateAndSubmit = async (event) => { + event.preventDefault() + const validationErrors = [] + setAuthErrors([]) + if (!tokenVal) { + validationErrors.push('Token cannot be left blank!') + } + if (newPasswordVal !== confirmPasswordVal) { + validationErrors.push('New Password and Confirm Password do not match.') + } + const passwordErrors = validatePassword(newPasswordVal) + if (passwordErrors.length > 0) { + validationErrors.push(...passwordErrors) + } + if (validationErrors.length > 0) { + setAuthErrors(validationErrors) + return + } + const body = { + user: { + email: emailVal, + tempToken: tokenVal, + password: newPasswordVal + } + } + setLoading(true) + try { + const updateResponse = await accountApi.resetPassword(body) + setAuthErrors([]) + setLoading(false) + if (updateResponse.data) { + enqueueSnackbar({ + message: 'Password reset successful', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + setEmailVal('') + setTokenVal('') + setNewPasswordVal('') + setConfirmPasswordVal('') + goLogin() + } + } catch (error) { + setLoading(false) + setAuthErrors([typeof error.response.data === 'object' ? error.response.data.message : error.response.data]) + enqueueSnackbar({ + message: `Failed to reset password!`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } + } + + return ( + <> + + + {authErrors && authErrors.length > 0 && ( + } variant='filled' severity='error'> +
      + {authErrors.map((msg, key) => ( +
    • {msg}
    • + ))} +
    +
    + )} + + Reset Password + + + Back to Login + + . + + +
    + + +
    + + Email * + + +
    +
    + setEmailVal(newValue)} + value={emailVal} + showDialog={false} + /> +
    + +
    + + Reset Token * + +
    +
    + setTokenVal(e.target.value)} + value={tokenVal} + sx={{ mt: '8px' }} + /> + + Please copy the token you received in your email. + +
    + +
    + + New Password * + + +
    +
    + setNewPasswordVal(newValue)} + value={newPasswordVal} + showDialog={false} + /> + + + Password must be at least 8 characters long and contain at least one lowercase letter, one uppercase + letter, one digit, and one special character (@$!%*?&-). + + +
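The helper text above states the rule that `passwordSchema` (register) and `validatePassword` (this view) enforce: at least 8 characters with one lowercase letter, one uppercase letter, one digit, and one special character from @$!%*?&-. Those helpers live in `@/utils/validation` and are not shown in this diff; an equivalent check that returns an array of messages, matching how `validatePassword` is consumed in `validateAndSubmit` above, might look like this:

```js
// Equivalent of the rule stated in the helper text above; the project's own
// validatePassword in @/utils/validation may differ in wording and details.
const PASSWORD_RULES = [
    { test: (pw) => pw.length >= 8, message: 'Password must be at least 8 characters long' },
    { test: (pw) => /[a-z]/.test(pw), message: 'Password must contain at least one lowercase letter' },
    { test: (pw) => /[A-Z]/.test(pw), message: 'Password must contain at least one uppercase letter' },
    { test: (pw) => /\d/.test(pw), message: 'Password must contain at least one digit' },
    { test: (pw) => /[@$!%*?&-]/.test(pw), message: 'Password must contain at least one special character (@$!%*?&-)' }
]

const checkPassword = (password) => PASSWORD_RULES.filter((rule) => !rule.test(password)).map((rule) => rule.message)

// checkPassword('abc')         -> four messages (length, uppercase, digit, special)
// checkPassword('Str0ng@Pass') -> []
```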
    + +
    + + Confirm Password * + +
    +
    + setConfirmPasswordVal(newValue)} + value={confirmPasswordVal} + showDialog={false} + /> + + Confirm your new password. Must match the password typed above. + +
    + + + Update Password + +
    +
    +
    +
    + {loading && } + + ) +} + +export default ResetPasswordPage diff --git a/packages/ui/src/views/auth/signIn.jsx b/packages/ui/src/views/auth/signIn.jsx new file mode 100644 index 00000000000..95845bf3f77 --- /dev/null +++ b/packages/ui/src/views/auth/signIn.jsx @@ -0,0 +1,351 @@ +import { useEffect, useState } from 'react' +import { useSelector } from 'react-redux' +import { Link, useLocation, useNavigate } from 'react-router-dom' + +// material-ui +import { Stack, useTheme, Typography, Box, Alert, Button, Divider, Icon } from '@mui/material' +import { IconExclamationCircle } from '@tabler/icons-react' +import { LoadingButton } from '@mui/lab' + +// project imports +import MainCard from '@/ui-component/cards/MainCard' +import { Input } from '@/ui-component/input/Input' + +// Hooks +import useApi from '@/hooks/useApi' +import { useConfig } from '@/store/context/ConfigContext' + +// API +import authApi from '@/api/auth' +import accountApi from '@/api/account.api' +import loginMethodApi from '@/api/loginmethod' +import ssoApi from '@/api/sso' + +// utils +import useNotifier from '@/utils/useNotifier' + +// store +import { loginSuccess, logoutSuccess } from '@/store/reducers/authSlice' +import { store } from '@/store' + +// icons +import AzureSSOLoginIcon from '@/assets/images/microsoft-azure.svg' +import GoogleSSOLoginIcon from '@/assets/images/google.svg' +import Auth0SSOLoginIcon from '@/assets/images/auth0.svg' +import GithubSSOLoginIcon from '@/assets/images/github.svg' + +// ==============================|| SignInPage ||============================== // + +const SignInPage = () => { + const theme = useTheme() + useSelector((state) => state.customization) + useNotifier() + const { isEnterpriseLicensed, isCloud, isOpenSource } = useConfig() + + const usernameInput = { + label: 'Username', + name: 'username', + type: 'email', + placeholder: 'user@company.com' + } + const passwordInput = { + label: 'Password', + name: 'password', + type: 'password', + placeholder: '********' + } + const [usernameVal, setUsernameVal] = useState('') + const [passwordVal, setPasswordVal] = useState('') + const [configuredSsoProviders, setConfiguredSsoProviders] = useState([]) + const [authError, setAuthError] = useState(undefined) + const [loading, setLoading] = useState(false) + const [showResendButton, setShowResendButton] = useState(false) + const [successMessage, setSuccessMessage] = useState('') + + const loginApi = useApi(authApi.login) + const ssoLoginApi = useApi(ssoApi.ssoLogin) + const getDefaultProvidersApi = useApi(loginMethodApi.getDefaultLoginMethods) + const navigate = useNavigate() + const location = useLocation() + const resendVerificationApi = useApi(accountApi.resendVerificationEmail) + + const doLogin = (event) => { + event.preventDefault() + setLoading(true) + const body = { + email: usernameVal, + password: passwordVal + } + loginApi.request(body) + } + + useEffect(() => { + if (loginApi.error) { + setLoading(false) + if (loginApi.error.response.status === 401 && loginApi.error.response.data.redirectUrl) { + window.location.href = loginApi.error.response.data.data.redirectUrl + } else { + setAuthError(loginApi.error.response.data.message) + } + } + }, [loginApi.error]) + + useEffect(() => { + store.dispatch(logoutSuccess()) + if (!isOpenSource) { + getDefaultProvidersApi.request() + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []) + + useEffect(() => { + // Parse the "user" query parameter from the URL + const queryParams = new URLSearchParams(location.search) + 
const errorData = queryParams.get('error') + if (!errorData) return + const parsedErrorData = JSON.parse(decodeURIComponent(errorData)) + setAuthError(parsedErrorData.message) + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [location.search]) + + useEffect(() => { + if (loginApi.data) { + setLoading(false) + store.dispatch(loginSuccess(loginApi.data)) + navigate(location.state?.path || '/chatflows') + //navigate(0) + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [loginApi.data]) + + useEffect(() => { + if (ssoLoginApi.data) { + store.dispatch(loginSuccess(ssoLoginApi.data)) + navigate(location.state?.path || '/chatflows') + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [ssoLoginApi.data]) + + useEffect(() => { + if (ssoLoginApi.error) { + if (ssoLoginApi.error?.response?.status === 401 && ssoLoginApi.error?.response?.data.redirectUrl) { + window.location.href = ssoLoginApi.error.response.data.redirectUrl + } else { + setAuthError(ssoLoginApi.error.message) + } + } + }, [ssoLoginApi.error]) + + useEffect(() => { + if (getDefaultProvidersApi.data && getDefaultProvidersApi.data.providers) { + //data is an array of objects, store only the provider attribute + setConfiguredSsoProviders(getDefaultProvidersApi.data.providers.map((provider) => provider)) + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getDefaultProvidersApi.data]) + + useEffect(() => { + if (authError === 'User Email Unverified') { + setShowResendButton(true) + } else { + setShowResendButton(false) + } + }, [authError]) + + const signInWithSSO = (ssoProvider) => { + window.location.href = `/api/v1/${ssoProvider}/login` + } + + const handleResendVerification = async () => { + try { + await resendVerificationApi.request({ email: usernameVal }) + setAuthError(undefined) + setSuccessMessage('Verification email has been sent successfully.') + setShowResendButton(false) + } catch (error) { + setAuthError(error.response?.data?.message || 'Failed to send verification email.') + } + } + + return ( + <> + + + {successMessage && ( + setSuccessMessage('')}> + {successMessage} + + )} + {authError && ( + } variant='filled' severity='error'> + {authError} + + )} + {showResendButton && ( + + + + )} + + Sign In + {isCloud && ( + + Don't have an account?{' '} + + Sign up for free + + . + + )} + {isEnterpriseLicensed && ( + + Have an invite code?{' '} + + Sign up for an account + + . + + )} + +
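The effect above surfaces SSO failures by reading an `error` query parameter and running `JSON.parse(decodeURIComponent(errorData))`. A tiny round-trip sketch of that encoding; the producing side (a server redirect back to /signin) is an assumption, only the consuming side mirrors the effect above. Note that `URLSearchParams.get` already percent-decodes, so the extra `decodeURIComponent` is effectively a no-op unless the decoded JSON itself contains a '%'.

```js
// Producer side (assumed): redirect back to the sign-in page with an encoded error payload
const payload = { message: 'User Email Unverified' }
const redirectUrl = `/signin?error=${encodeURIComponent(JSON.stringify(payload))}`

// Consumer side, mirroring the useEffect in SignInPage above
const queryParams = new URLSearchParams(redirectUrl.split('?')[1])
const errorData = queryParams.get('error')
if (errorData) {
    const parsed = JSON.parse(decodeURIComponent(errorData))
    console.log(parsed.message) // 'User Email Unverified'
}
```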
    + + +
    + + Email * + +
    +
    + setUsernameVal(newValue)} + value={usernameVal} + showDialog={false} + /> +
    + +
    + + Password * + +
    +
    + setPasswordVal(newValue)} value={passwordVal} /> + + + Forgot password? + + + {isCloud && ( + + + Migrate from existing account? + + + )} +
    + + Login + + {configuredSsoProviders && configuredSsoProviders.length > 0 && OR} + {configuredSsoProviders && + configuredSsoProviders.map( + (ssoProvider) => + //https://learn.microsoft.com/en-us/entra/identity-platform/howto-add-branding-in-apps + ssoProvider === 'azure' && ( + + ) + )} + {configuredSsoProviders && + configuredSsoProviders.map( + (ssoProvider) => + ssoProvider === 'google' && ( + + ) + )} + {configuredSsoProviders && + configuredSsoProviders.map( + (ssoProvider) => + ssoProvider === 'auth0' && ( + + ) + )} + {configuredSsoProviders && + configuredSsoProviders.map( + (ssoProvider) => + ssoProvider === 'github' && ( + + ) + )} +
    +
    +
    +
    + + ) +} + +export default SignInPage diff --git a/packages/ui/src/views/auth/ssoConfig.jsx b/packages/ui/src/views/auth/ssoConfig.jsx new file mode 100644 index 00000000000..b57a42eeb5e --- /dev/null +++ b/packages/ui/src/views/auth/ssoConfig.jsx @@ -0,0 +1,1028 @@ +import { useEffect, useState } from 'react' +import { useDispatch, useSelector } from 'react-redux' +import { closeSnackbar as closeSnackbarAction, enqueueSnackbar as enqueueSnackbarAction } from '@/store/actions' + +// material-ui +import { Popover, IconButton, Stack, Typography, Box, OutlinedInput, Button, Tabs, Tab, Divider } from '@mui/material' +import { useTheme } from '@mui/material/styles' + +// project imports +import MainCard from '@/ui-component/cards/MainCard' +import ViewHeader from '@/layout/MainLayout/ViewHeader' +import ErrorBoundary from '@/ErrorBoundary' +import { BackdropLoader } from '@/ui-component/loading/BackdropLoader' +import { SwitchInput } from '@/ui-component/switch/Switch' +import { PermissionButton, StyledPermissionButton } from '@/ui-component/button/RBACButtons' +import { TabPanel } from '@/ui-component/tabs/TabPanel' + +// API +import loginMethodApi from '@/api/loginmethod' +import useApi from '@/hooks/useApi' + +// utils +import useNotifier from '@/utils/useNotifier' +import { useError } from '@/store/context/ErrorContext' + +// Icons +import { IconAlertTriangle, IconX, IconCopy } from '@tabler/icons-react' +import MicrosoftSVG from '@/assets/images/microsoft-azure.svg' +import GoogleSVG from '@/assets/images/google.svg' +import Auth0SVG from '@/assets/images/auth0.svg' +import GithubSVG from '@/assets/images/github.svg' + +// const +import { gridSpacing } from '@/store/constant' + +const SSOConfigPage = () => { + useNotifier() + const { error, setError } = useError() + const theme = useTheme() + + const dispatch = useDispatch() + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const [azureConfigEnabled, setAzureConfigEnabled] = useState(false) + const [azureTenantID, setAzureTenantID] = useState('') + const [azureClientID, setAzureClientID] = useState('') + const [azureClientSecret, setAzureClientSecret] = useState('') + const [azureCallbackURL, setAzureCallbackURL] = useState('') + + const [googleConfigEnabled, setGoogleConfigEnabled] = useState(false) + const [googleClientID, setGoogleClientID] = useState('') + const [googleClientSecret, setGoogleClientSecret] = useState('') + const [googleCallbackURL, setGoogleCallbackURL] = useState('') + + const [githubConfigEnabled, setGithubConfigEnabled] = useState(false) + const [githubClientID, setGithubClientID] = useState('') + const [githubClientSecret, setGithubClientSecret] = useState('') + const [githubCallbackURL, setGithubCallbackURL] = useState('') + + const [auth0ConfigEnabled, setAuth0ConfigEnabled] = useState(false) + const [auth0Domain, setAuth0Domain] = useState('') + const [auth0ClientID, setAuth0ClientID] = useState('') + const [auth0ClientSecret, setAuth0ClientSecret] = useState('') + const [auth0CallbackURL, setAuth0CallbackURL] = useState('') + + const [loading, setLoading] = useState(false) + const [authErrors, setAuthErrors] = useState([]) + + const getLoginMethodsApi = useApi(loginMethodApi.getLoginMethods) + const [tabValue, setTabValue] = useState(0) + + const [copyAnchorEl, setCopyAnchorEl] = useState(null) + const openCopyPopOver = Boolean(copyAnchorEl) + + const currentUser = useSelector((state) => 
state.auth.user) + + const handleCloseCopyPopOver = () => { + setCopyAnchorEl(null) + } + + const validateAzureFields = (validationErrors) => { + if (!azureTenantID) { + validationErrors.push('Azure TenantID cannot be left blank!') + } + if (!azureClientID) { + validationErrors.push('Azure ClientID cannot be left blank!') + } + if (!azureClientSecret) { + validationErrors.push('Azure Client Secret cannot be left blank!') + } + } + const validateGoogleFields = (validationErrors) => { + if (!googleClientID) { + validationErrors.push('Google ClientID cannot be left blank!') + } + if (!googleClientSecret) { + validationErrors.push('Google Client Secret cannot be left blank!') + } + } + + const validateGithubFields = (validationErrors) => { + if (!githubClientID) { + validationErrors.push('Github ClientID cannot be left blank!') + } + if (!githubClientSecret) { + validationErrors.push('Github Client Secret cannot be left blank!') + } + } + + const validateAuth0Fields = (validationErrors) => { + if (!auth0Domain) { + validationErrors.push('Auth0 Domain cannot be left blank!') + } + if (!auth0ClientID) { + validationErrors.push('Auth0 ClientID cannot be left blank!') + } + if (!auth0ClientSecret) { + validationErrors.push('Auth0 Client Secret cannot be left blank!') + } + } + + const validateFields = () => { + const validationErrors = [] + setAuthErrors([]) + if (azureConfigEnabled) { + validateAzureFields(validationErrors) + } + if (googleConfigEnabled) { + validateGoogleFields(validationErrors) + } + if (auth0ConfigEnabled) { + validateAuth0Fields(validationErrors) + } + if (githubConfigEnabled) { + validateGithubFields(validationErrors) + } + return validationErrors + } + + function constructRequestBody() { + const body = { + organizationId: currentUser.activeOrganizationId, + userId: currentUser.id, + providers: [ + { + providerLabel: 'Microsoft', + providerName: 'azure', + config: { + tenantID: azureTenantID, + clientID: azureClientID, + clientSecret: azureClientSecret + }, + status: azureConfigEnabled ? 'enable' : 'disable' + }, + { + providerLabel: 'Google', + providerName: 'google', + config: { + clientID: googleClientID, + clientSecret: googleClientSecret + }, + status: googleConfigEnabled ? 'enable' : 'disable' + }, + { + providerLabel: 'Auth0', + providerName: 'auth0', + config: { + domain: auth0Domain, + clientID: auth0ClientID, + clientSecret: auth0ClientSecret + }, + status: auth0ConfigEnabled ? 'enable' : 'disable' + }, + { + providerLabel: 'Github', + providerName: 'github', + config: { + clientID: githubClientID, + clientSecret: githubClientSecret + }, + status: githubConfigEnabled ? 'enable' : 'disable' + } + ] + } + return body + } + + const validateAndSubmit = async () => { + const validationErrors = validateFields() + if (validationErrors.length > 0) { + setAuthErrors(validationErrors) + return + } + setLoading(true) + try { + const updateResponse = await loginMethodApi.updateLoginMethods(constructRequestBody()) + setAuthErrors([]) + setLoading(false) + if (updateResponse.data) { + enqueueSnackbar({ + message: 'SSO Configuration Updated!', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + } + } catch (error) { + setLoading(false) + setAuthErrors([typeof error.response.data === 'object' ? 
error.response.data.message : error.response.data]) + enqueueSnackbar({ + message: `Failed to update SSO Configuration.`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } + } + + const validateAndTest = async (providerName) => { + let validationErrors = [] + switch (providerName) { + case 'Azure': + validateAzureFields(validationErrors) + break + case 'Google': + validateGoogleFields(validationErrors) + break + case 'Auth0': + validateAuth0Fields(validationErrors) + break + case 'Gtihub': + validateGithubFields(validationErrors) + break + } + if (validationErrors.length > 0) { + setAuthErrors(validationErrors) + return + } + const body = constructRequestBody() + // depending on the tab value, we need to set the provider name and remove the other provider + body.providers = [body.providers[tabValue]] + body.providerName = providerName.toLowerCase() + setLoading(true) + try { + const updateResponse = await loginMethodApi.testLoginMethod(body) + setAuthErrors([]) + setLoading(false) + if (updateResponse.data?.message) { + enqueueSnackbar({ + message: `${getSelectedProviderName()} SSO Configuration is Valid!`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + } + if (updateResponse.data.error) { + enqueueSnackbar({ + message: `${updateResponse.data.error}`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } + } catch (error) { + setLoading(false) + setAuthErrors([typeof error.response.data === 'object' ? error.response.data.message : error.response.data]) + enqueueSnackbar({ + message: `Failed to verify ${getSelectedProviderName()} SSO Configuration.`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } + } + + const handleAzureChange = (value) => { + setAzureConfigEnabled(value) + } + + const handleGoogleChange = (value) => { + setGoogleConfigEnabled(value) + } + + const handleAuth0Change = (value) => { + setAuth0ConfigEnabled(value) + } + + const handleGithubChange = (value) => { + setGithubConfigEnabled(value) + } + + const getSelectedProviderName = () => { + switch (tabValue) { + case 0: + return 'Azure' + case 1: + return 'Google' + case 2: + return 'Auth0' + case 3: + return 'Github' + } + } + + useEffect(() => { + if (getLoginMethodsApi.data) { + const data = getLoginMethodsApi.data + const azureConfig = data.providers.find((provider) => provider.name === 'azure') + const azureCallback = data.callbacks.find((callback) => callback.providerName === 'azure') + if (azureCallback) { + setAzureCallbackURL(azureCallback.callbackURL) + } + if (azureConfig) { + setAzureTenantID(azureConfig.config.tenantID) + setAzureClientID(azureConfig.config.clientID) + setAzureClientSecret(azureConfig.config.clientSecret) + setAzureConfigEnabled(azureConfig.status === 'enable') + } + const googleConfig = data.providers.find((provider) => provider.name === 'google') + const googleCallback = data.callbacks.find((callback) => callback.providerName === 'google') + if (googleCallback) { + setGoogleCallbackURL(googleCallback.callbackURL) + } + if (googleConfig) { + setGoogleClientID(googleConfig.config.clientID) + setGoogleClientSecret(googleConfig.config.clientSecret) + setGoogleConfigEnabled(googleConfig.status === 'enable') + } + const auth0Config = data.providers.find((provider) => provider.name 
=== 'auth0') + const auth0Callback = data.callbacks.find((callback) => callback.providerName === 'auth0') + if (auth0Callback) { + setAuth0CallbackURL(auth0Callback.callbackURL) + } + + if (auth0Config) { + setAuth0Domain(auth0Config.config.domain) + setAuth0ClientID(auth0Config.config.clientID) + setAuth0ClientSecret(auth0Config.config.clientSecret) + setAuth0ConfigEnabled(auth0Config.status === 'enable') + } + + const githubConfig = data.providers.find((provider) => provider.name === 'github') + const githubCallback = data.callbacks.find((callback) => callback.providerName === 'github') + if (githubCallback) { + setGithubCallbackURL(githubCallback.callbackURL) + } + if (githubConfig) { + setGithubClientID(githubConfig.config.clientID) + setGithubClientSecret(githubConfig.config.clientSecret) + setGithubConfigEnabled(githubConfig.status === 'enable') + } + setLoading(false) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getLoginMethodsApi.data]) + + useEffect(() => { + if (getLoginMethodsApi.error) { + setLoading(false) + setError(getLoginMethodsApi.error) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getLoginMethodsApi.error]) + + useEffect(() => { + setLoading(true) + getLoginMethodsApi.request(currentUser.activeOrganizationId) + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []) + + return ( + <> + + {error ? ( + + ) : ( + + + {authErrors && authErrors.length > 0 && ( +
    + + + + + +
      + {authErrors.map((msg, key) => ( + +
    • {msg}
    • +
      + ))} +
    +
    +
    +
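The per-provider validators called in validateAndSubmit/validateAndTest (validateAzureFields, validateGoogleFields, validateAuth0Fields, validateGithubFields) are not part of this hunk. The sketch below is a hypothetical illustration of the calling pattern only — the field names and messages are assumptions, not code from this diff:

// Hypothetical required-field validator (field names and messages assumed).
// It mirrors the convention above: push human-readable messages into the
// shared validationErrors array and let the caller pass them to setAuthErrors.
const validateRequiredFields = (fields, validationErrors) => {
    for (const [label, value] of Object.entries(fields)) {
        if (!value || String(value).trim() === '') {
            validationErrors.push(`${label} is required`)
        }
    }
}

// Example usage with the Azure state held by this view:
// const errors = []
// validateRequiredFields({ 'Client ID': azureClientID, 'Client Secret': azureClientSecret }, errors)
// if (errors.length > 0) setAuthErrors(errors)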
    + )} + setTabValue(val)} aria-label='tabs'> + } + sx={{ + minHeight: '40px', + height: '40px', + display: 'flex', + alignItems: 'center', + mb: 1 + }} + value={0} + label={ + <> + Microsoft + {azureConfigEnabled && ( +
    +
    +
    + )} + + } + /> + } + sx={{ + minHeight: '40px', + height: '40px', + display: 'flex', + alignItems: 'center', + mb: 1 + }} + value={1} + label={ + <> + Google + {googleConfigEnabled && ( +
    +
    +
    + )} + + } + /> + } + sx={{ + minHeight: '40px', + height: '40px', + display: 'flex', + alignItems: 'center', + mb: 1 + }} + value={2} + label={ + <> + Auth0 + {auth0ConfigEnabled && ( +
    +
    +
    + )} + + } + /> + } + sx={{ + minHeight: '40px', + height: '40px', + display: 'flex', + alignItems: 'center', + mb: 1 + }} + value={3} + label={ + <> + Github + {githubConfigEnabled && ( +
    +
    +
    + )} + + } + /> + + + + + Enable SSO Login + + + + + + {azureCallbackURL} + + { + navigator.clipboard.writeText(azureCallbackURL) + setCopyAnchorEl(event.currentTarget) + setTimeout(() => { + handleCloseCopyPopOver() + }, 1500) + }} + > + + + + + +
    + Tenant ID +
    +
    + setAzureTenantID(e.target.value)} + value={azureTenantID} + /> +
    + +
    + + Client ID * + +
    +
    + setAzureClientID(e.target.value)} + value={azureClientID} + /> +
    + +
    + + Client Secret * + +
    +
    + setAzureClientSecret(e.target.value)} + value={azureClientSecret} + /> +
    +
    +
    + + + + Enable SSO Login + + + + + + {googleCallbackURL} + + { + navigator.clipboard.writeText(googleCallbackURL) + setCopyAnchorEl(event.currentTarget) + setTimeout(() => { + handleCloseCopyPopOver() + }, 1500) + }} + > + + + + + +
    + + Client ID * + +
    +
    + setGoogleClientID(e.target.value)} + value={googleClientID} + /> +
    + +
    + + Client Secret * + +
    +
    + setGoogleClientSecret(e.target.value)} + value={googleClientSecret} + /> +
    +
    +
    + + + + Enable SSO Login + + + + + + {auth0CallbackURL} + + { + navigator.clipboard.writeText(auth0CallbackURL) + setCopyAnchorEl(event.currentTarget) + setTimeout(() => { + handleCloseCopyPopOver() + }, 1500) + }} + > + + + + + +
    + Auth0 Domain +
    +
    + setAuth0Domain(e.target.value)} + value={auth0Domain} + /> +
    + +
    + + Client ID * + +
    +
    + setAuth0ClientID(e.target.value)} + value={auth0ClientID} + /> +
    + +
    + + Client Secret * + +
    +
    + setAuth0ClientSecret(e.target.value)} + value={auth0ClientSecret} + /> +
    +
    +
    + + + + Enable SSO Login + + + + + + {githubCallbackURL} + + { + navigator.clipboard.writeText(githubCallbackURL) + setCopyAnchorEl(event.currentTarget) + setTimeout(() => { + handleCloseCopyPopOver() + }, 1500) + }} + > + + + + + +
    + + Client ID * + +
    +
    + setGithubClientID(e.target.value)} + value={githubClientID} + /> +
    + +
    + + Client Secret * + +
    +
    + setGithubClientSecret(e.target.value)} + value={githubClientSecret} + /> +
    +
    +
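The Test button rendered below goes through validateAndTest, which narrows the full request body down to the provider on the active tab before calling the test endpoint. A condensed sketch of that selection step, using the helpers referenced above; the assumption is that body.providers follows the tab order (0: azure, 1: google, 2: auth0, 3: github), matching getSelectedProviderName:

// Condensed sketch of the provider-narrowing step used by validateAndTest.
const buildTestBody = (constructRequestBody, tabValue, providerName) => {
    const body = constructRequestBody()
    // keep only the provider that belongs to the active tab
    body.providers = [body.providers[tabValue]]
    body.providerName = providerName.toLowerCase()
    return body
}

// Usage, mirroring the component above:
// const body = buildTestBody(constructRequestBody, tabValue, getSelectedProviderName())
// const resp = await loginMethodApi.testLoginMethod(body)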
    + + + + validateAndTest(getSelectedProviderName())} + > + {'Test ' + getSelectedProviderName() + ' Configuration'} + + + validateAndSubmit()} + > + Save + + + + )} + + {loading && } + + + Copied! + + + + ) +} + +export default SSOConfigPage diff --git a/packages/ui/src/views/auth/ssoSuccess.jsx b/packages/ui/src/views/auth/ssoSuccess.jsx new file mode 100644 index 00000000000..e8e78405e49 --- /dev/null +++ b/packages/ui/src/views/auth/ssoSuccess.jsx @@ -0,0 +1,36 @@ +import { useEffect } from 'react' +import { useLocation, useNavigate } from 'react-router-dom' +import { store } from '@/store' +import { loginSuccess } from '@/store/reducers/authSlice' + +const SSOSuccess = () => { + const location = useLocation() + const navigate = useNavigate() + + useEffect(() => { + // Parse the "user" query parameter from the URL + const queryParams = new URLSearchParams(location.search) + const userData = queryParams.get('user') + + if (userData) { + // Decode the user data and save it to the state + try { + const parsedUser = JSON.parse(decodeURIComponent(userData)) + store.dispatch(loginSuccess(parsedUser)) + navigate('/chatflows') + } catch (error) { + console.error('Failed to parse user data:', error) + } + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [location.search]) + + return ( +
    +

    Loading dashboard...

    +

    Loading data...

    +
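ssoSuccess.jsx expects the SSO callback to redirect here with a `user` query parameter containing URI-encoded JSON. A small sketch of both sides of that contract; the '/sso-success' route path and the server-side redirect are assumptions inferred from the client-side parsing, not code from this diff:

// Client-side parsing, as done in the useEffect above.
const parseSsoUser = (search) => {
    const userData = new URLSearchParams(search).get('user')
    if (!userData) return null
    try {
        return JSON.parse(decodeURIComponent(userData))
    } catch (err) {
        console.error('Failed to parse user data:', err)
        return null
    }
}

// Assumed server-side counterpart: encode the authenticated user and redirect.
const buildSsoSuccessUrl = (baseUrl, user) => `${baseUrl}/sso-success?user=${encodeURIComponent(JSON.stringify(user))}`

// Example:
// parseSsoUser('?user=%7B%22id%22%3A%221%22%7D') // -> { id: '1' }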
    + ) +} + +export default SSOSuccess diff --git a/packages/ui/src/views/auth/unauthorized.jsx b/packages/ui/src/views/auth/unauthorized.jsx new file mode 100644 index 00000000000..492007741cf --- /dev/null +++ b/packages/ui/src/views/auth/unauthorized.jsx @@ -0,0 +1,60 @@ +import MainCard from '@/ui-component/cards/MainCard' +import { Box, Stack, Typography } from '@mui/material' +import unauthorizedSVG from '@/assets/images/unauthorized.svg' +import { StyledButton } from '@/ui-component/button/StyledButton' +import { Link } from 'react-router-dom' +import { useSelector } from 'react-redux' + +// ==============================|| UnauthorizedPage ||============================== // + +const UnauthorizedPage = () => { + const currentUser = useSelector((state) => state.auth.user) + + return ( + <> + + + + + unauthorizedSVG + + + 403 Forbidden + + + You do not have permission to access this page. + + {currentUser ? ( + + Back to Home + + ) : ( + + Back to Login + + )} + + + + + ) +} + +export default UnauthorizedPage diff --git a/packages/ui/src/views/auth/verify-email.jsx b/packages/ui/src/views/auth/verify-email.jsx new file mode 100644 index 00000000000..1a5d050741d --- /dev/null +++ b/packages/ui/src/views/auth/verify-email.jsx @@ -0,0 +1,124 @@ +import { useEffect } from 'react' +import { useNavigate, useSearchParams } from 'react-router-dom' + +// material-ui +import { Stack, Typography, Box, useTheme, CircularProgress } from '@mui/material' + +// project imports +import MainCard from '@/ui-component/cards/MainCard' + +// API +import accountApi from '@/api/account.api' + +// Hooks +import useApi from '@/hooks/useApi' + +// icons +import { IconCheck } from '@tabler/icons-react' +import { useState } from 'react' +import { IconX } from '@tabler/icons-react' + +const VerifyEmail = () => { + const accountVerifyApi = useApi(accountApi.verifyAccountEmail) + + const [searchParams] = useSearchParams() + const [loading, setLoading] = useState(false) + const [verificationError, setVerificationError] = useState('') + const [verificationSuccess, setVerificationSuccess] = useState(false) + const navigate = useNavigate() + + const theme = useTheme() + + useEffect(() => { + if (accountVerifyApi.data) { + setLoading(false) + setVerificationError('') + setVerificationSuccess(true) + setTimeout(() => { + navigate('/signin') + }, 3000) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [accountVerifyApi.data]) + + useEffect(() => { + if (accountVerifyApi.error) { + setLoading(false) + setVerificationError(accountVerifyApi.error) + setVerificationSuccess(false) + } + }, [accountVerifyApi.error]) + + useEffect(() => { + const token = searchParams.get('token') + if (token) { + setLoading(true) + setVerificationError('') + setVerificationSuccess(false) + accountVerifyApi.request({ user: { tempToken: token } }) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []) + + return ( + + + + + {loading && ( + <> + + Verifying Email... + + )} + {verificationError && ( + <> + + + + Verification Failed. + + )} + {verificationSuccess && ( + <> + + + + Email Verified Successfully. 
+ + )} + + + + + ) +} + +export default VerifyEmail diff --git a/packages/ui/src/views/canvas/CanvasHeader.jsx b/packages/ui/src/views/canvas/CanvasHeader.jsx index 416e7ba9f5d..bef733ecead 100644 --- a/packages/ui/src/views/canvas/CanvasHeader.jsx +++ b/packages/ui/src/views/canvas/CanvasHeader.jsx @@ -19,6 +19,7 @@ import ChatflowConfigurationDialog from '@/ui-component/dialog/ChatflowConfigura import UpsertHistoryDialog from '@/views/vectorstore/UpsertHistoryDialog' import ViewLeadsDialog from '@/ui-component/dialog/ViewLeadsDialog' import ExportAsTemplateDialog from '@/ui-component/dialog/ExportAsTemplateDialog' +import { Available } from '@/ui-component/rbac/available' // API import chatflowsApi from '@/api/chatflows' @@ -60,6 +61,8 @@ const CanvasHeader = ({ chatflow, isAgentCanvas, isAgentflowV2, handleSaveFlow, const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + const [savePermission, setSavePermission] = useState(isAgentCanvas ? 'agentflows:create' : 'chatflows:create') + const title = isAgentCanvas ? 'Agents' : 'Chatflow' const updateChatflowApi = useApi(chatflowsApi.updateChatflow) @@ -215,12 +218,14 @@ const CanvasHeader = ({ chatflow, isAgentCanvas, isAgentflowV2, handleSaveFlow, const onConfirmSaveName = (flowName) => { setFlowDialogOpen(false) + setSavePermission(isAgentCanvas ? 'agentflows:update' : 'chatflows:update') handleSaveFlow(flowName) } useEffect(() => { if (updateChatflowApi.data) { setFlowName(updateChatflowApi.data.name) + setSavePermission(isAgentCanvas ? 'agentflows:update' : 'chatflows:update') dispatch({ type: SET_CHATFLOW, chatflow: updateChatflowApi.data }) } setEditingFlowName(false) @@ -289,27 +294,29 @@ const CanvasHeader = ({ chatflow, isAgentCanvas, isAgentflowV2, handleSaveFlow, {canvas.isDirty && *} {flowName} {chatflow?.id && ( - - setEditingFlowName(true)} - > - - - + + + setEditingFlowName(true)} + > + + + + )} ) : ( @@ -401,26 +408,28 @@ const CanvasHeader = ({ chatflow, isAgentCanvas, isAgentflowV2, handleSaveFlow, )} - - - - - + + + + + + + { const dialogProp = { @@ -104,7 +106,7 @@ const CredentialInputHandler = ({ inputParam, data, onSelect, disabled = false } name={inputParam.name} nodeData={data} value={credentialId ?? 'choose an option'} - isCreateNewOption={true} + isCreateNewOption={hasPermission('credentials:create')} credentialNames={inputParam.credentialNames} onSelect={(newValue) => { setCredentialId(newValue) @@ -112,7 +114,7 @@ const CredentialInputHandler = ({ inputParam, data, onSelect, disabled = false } }} onCreateNew={() => addAsyncOption(inputParam.name)} /> - {credentialId && ( + {credentialId && hasPermission('credentials:update') && ( editCredential(credentialId)}> diff --git a/packages/ui/src/views/canvas/NodeInputHandler.jsx b/packages/ui/src/views/canvas/NodeInputHandler.jsx index bc3afa66aa0..df22995c630 100644 --- a/packages/ui/src/views/canvas/NodeInputHandler.jsx +++ b/packages/ui/src/views/canvas/NodeInputHandler.jsx @@ -1037,6 +1037,7 @@ const NodeInputHandler = ({ variant='outlined' onClick={() => { data.inputs[inputParam.name] = inputParam.codeExample + setReloadTimestamp(Date.now().toString()) }} > See Example @@ -1044,6 +1045,7 @@ const NodeInputHandler = ({ )}
    { const theme = useTheme() const canvas = useSelector((state) => state.canvas) + const customization = useSelector((state) => state.customization) const { deleteNode, duplicateNode } = useContext(flowContext) const [inputParam] = data.inputParams @@ -31,12 +32,23 @@ const StickyNote = ({ data }) => { setOpen(true) } + const defaultColor = '#FFE770' // fallback color if data.color is not present + const nodeColor = data.color || defaultColor + const getBorderColor = () => { if (data.selected) return theme.palette.primary.main - else if (theme?.customization?.isDarkMode) return theme.palette.grey[900] + 25 + else if (customization?.isDarkMode) return theme.palette.grey[700] else return theme.palette.grey[900] + 50 } + const getBackgroundColor = () => { + if (customization?.isDarkMode) { + return data.selected ? darken(nodeColor, 0.7) : darken(nodeColor, 0.8) + } else { + return data.selected ? lighten(nodeColor, 0.1) : lighten(nodeColor, 0.2) + } + } + return ( <> { sx={{ padding: 0, borderColor: getBorderColor(), - backgroundColor: data.selected ? '#FFDC00' : '#FFE770' + backgroundColor: getBackgroundColor() }} border={false} > @@ -66,8 +78,12 @@ const StickyNote = ({ data }) => { onClick={() => { duplicateNode(data.id) }} - sx={{ height: '35px', width: '35px', '&:hover': { color: theme?.palette.primary.main } }} - color={theme?.customization?.isDarkMode ? theme.colors?.paper : 'inherit'} + sx={{ + height: '35px', + width: '35px', + color: customization?.isDarkMode ? 'white' : 'inherit', + '&:hover': { color: theme?.palette.primary.main } + }} > @@ -76,8 +92,12 @@ const StickyNote = ({ data }) => { onClick={() => { deleteNode(data.id) }} - sx={{ height: '35px', width: '35px', '&:hover': { color: 'red' } }} - color={theme?.customization?.isDarkMode ? theme.colors?.paper : 'inherit'} + sx={{ + height: '35px', + width: '35px', + color: customization?.isDarkMode ? 
'white' : 'inherit', + '&:hover': { color: theme?.palette.error.main } + }} > diff --git a/packages/ui/src/views/canvas/index.css b/packages/ui/src/views/canvas/index.css index 3ad92678636..31e7edccb28 100644 --- a/packages/ui/src/views/canvas/index.css +++ b/packages/ui/src/views/canvas/index.css @@ -47,3 +47,42 @@ cursor: crosshair; background: #5dba62 !important; } + +/* Dark mode controls styling */ +.dark-mode-controls { + --xy-controls-button-background-color-default: #2d2d2d; + --xy-controls-button-background-color-hover-default: #404040; + --xy-controls-button-border-color-default: #525252; + --xy-controls-box-shadow-default: 0 0 2px 1px rgba(255, 255, 255, 0.1); +} + +.dark-mode-controls .react-flow__controls-button { + background-color: #2d2d2d; + border-color: #525252; + color: #ffffff; + border: 1px solid #525252; +} + +.dark-mode-controls .react-flow__controls-button:hover { + background-color: #404040; +} + +.dark-mode-controls .react-flow__controls-button.react-flow__controls-interactive { + background-color: #2d2d2d; + border-color: #525252; + color: #ffffff; +} + +.dark-mode-controls .react-flow__controls-button.react-flow__controls-interactive:hover { + background-color: #404040; +} + +.dark-mode-controls .react-flow__controls-button svg { + color: #ffffff; + fill: #ffffff; +} + +.dark-mode-controls .react-flow__controls-button:hover svg { + color: #ffffff; + fill: #ffffff; +} diff --git a/packages/ui/src/views/canvas/index.jsx b/packages/ui/src/views/canvas/index.jsx index 028c3c4626c..ebfbd0506fa 100644 --- a/packages/ui/src/views/canvas/index.jsx +++ b/packages/ui/src/views/canvas/index.jsx @@ -35,9 +35,10 @@ import chatflowsApi from '@/api/chatflows' // Hooks import useApi from '@/hooks/useApi' import useConfirm from '@/hooks/useConfirm' +import { useAuth } from '@/hooks/useAuth' // icons -import { IconX, IconRefreshAlert } from '@tabler/icons-react' +import { IconX, IconRefreshAlert, IconMagnetFilled, IconMagnetOff } from '@tabler/icons-react' // utils import { @@ -62,6 +63,7 @@ const edgeTypes = { buttonedge: ButtonEdge } const Canvas = () => { const theme = useTheme() const navigate = useNavigate() + const { hasAssignedWorkspace } = useAuth() const { state } = useLocation() const templateFlowData = state ? 
state.templateFlowData : '' @@ -75,6 +77,7 @@ const Canvas = () => { const { confirm } = useConfirm() const dispatch = useDispatch() + const customization = useSelector((state) => state.customization) const canvas = useSelector((state) => state.canvas) const [canvasDataStore, setCanvasDataStore] = useState(canvas) const [chatflow, setChatflow] = useState(null) @@ -94,15 +97,21 @@ const Canvas = () => { const [selectedNode, setSelectedNode] = useState(null) const [isUpsertButtonEnabled, setIsUpsertButtonEnabled] = useState(false) const [isSyncNodesButtonEnabled, setIsSyncNodesButtonEnabled] = useState(false) + const [isSnappingEnabled, setIsSnappingEnabled] = useState(false) const reactFlowWrapper = useRef(null) + const [lastUpdatedDateTime, setLasUpdatedDateTime] = useState('') + const [chatflowName, setChatflowName] = useState('') + const [flowData, setFlowData] = useState('') + // ==============================|| Chatflow API ||============================== // const getNodesApi = useApi(nodesApi.getAllNodes) const createNewChatflowApi = useApi(chatflowsApi.createNewChatflow) const updateChatflowApi = useApi(chatflowsApi.updateChatflow) const getSpecificChatflowApi = useApi(chatflowsApi.getSpecificChatflow) + const getHasChatflowChangedApi = useApi(chatflowsApi.getHasChatflowChanged) // ==============================|| Events & Actions ||============================== // @@ -198,7 +207,7 @@ const Canvas = () => { } } - const handleSaveFlow = (chatflowName) => { + const handleSaveFlow = async (chatflowName) => { if (reactFlowInstance) { const nodes = reactFlowInstance.getNodes().map((node) => { const nodeData = cloneDeep(node.data) @@ -227,11 +236,9 @@ const Canvas = () => { } createNewChatflowApi.request(newChatflowBody) } else { - const updateBody = { - name: chatflowName, - flowData - } - updateChatflowApi.request(chatflow.id, updateBody) + setChatflowName(chatflowName) + setFlowData(flowData) + getHasChatflowChangedApi.request(chatflow.id, lastUpdatedDateTime) } } } @@ -401,7 +408,13 @@ const Canvas = () => { useEffect(() => { if (getSpecificChatflowApi.data) { const chatflow = getSpecificChatflowApi.data + const workspaceId = chatflow.workspaceId + if (!hasAssignedWorkspace(workspaceId)) { + navigate('/unauthorized') + return + } const initialFlow = chatflow.flowData ? JSON.parse(chatflow.flowData) : [] + setLasUpdatedDateTime(chatflow.updatedDate) setNodes(initialFlow.nodes || []) setEdges(initialFlow.edges || []) dispatch({ type: SET_CHATFLOW, chatflow }) @@ -420,7 +433,7 @@ const Canvas = () => { saveChatflowSuccess() window.history.replaceState(state, null, `/${isAgentCanvas ? 
'agentcanvas' : 'canvas'}/${chatflow.id}`) } else if (createNewChatflowApi.error) { - errorFailed(`Failed to save ${canvasTitle}: ${createNewChatflowApi.error.response.data.message}`) + errorFailed(`Failed to retrieve ${canvasTitle}: ${createNewChatflowApi.error.response.data.message}`) } // eslint-disable-next-line react-hooks/exhaustive-deps @@ -430,14 +443,45 @@ const Canvas = () => { useEffect(() => { if (updateChatflowApi.data) { dispatch({ type: SET_CHATFLOW, chatflow: updateChatflowApi.data }) + setLasUpdatedDateTime(updateChatflowApi.data.updatedDate) saveChatflowSuccess() } else if (updateChatflowApi.error) { - errorFailed(`Failed to save ${canvasTitle}: ${updateChatflowApi.error.response.data.message}`) + errorFailed(`Failed to retrieve ${canvasTitle}: ${updateChatflowApi.error.response.data.message}`) } // eslint-disable-next-line react-hooks/exhaustive-deps }, [updateChatflowApi.data, updateChatflowApi.error]) + // check if chatflow has changed before saving + useEffect(() => { + const checkIfHasChanged = async () => { + if (getHasChatflowChangedApi.data?.hasChanged === true) { + const confirmPayload = { + title: `Confirm Change`, + description: `${canvasTitle} ${chatflow.name} has changed since you have opened, overwrite changes?`, + confirmButtonName: 'Confirm', + cancelButtonName: 'Cancel' + } + const isConfirmed = await confirm(confirmPayload) + + if (!isConfirmed) { + return + } + } + const updateBody = { + name: chatflowName, + flowData + } + updateChatflowApi.request(chatflow.id, updateBody) + } + + if (getHasChatflowChangedApi.data) { + checkIfHasChanged() + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getHasChatflowChangedApi.data, getHasChatflowChangedApi.error]) + useEffect(() => { setChatflow(canvasDataStore.chatflow) if (canvasDataStore.chatflow) { @@ -554,16 +598,30 @@ const Canvas = () => { fitView deleteKeyCode={canvas.canvasDialogShow ? null : ['Delete']} minZoom={0.1} + snapGrid={[25, 25]} + snapToGrid={isSnappingEnabled} className='chatflow-canvas' > + > + + {isSyncNodesButtonEnabled && ( diff --git a/packages/ui/src/views/chatbot/index.jsx b/packages/ui/src/views/chatbot/index.jsx index aa58936910c..1bf8b723d0b 100644 --- a/packages/ui/src/views/chatbot/index.jsx +++ b/packages/ui/src/views/chatbot/index.jsx @@ -1,9 +1,5 @@ import { useEffect, useState } from 'react' import { FullPageChat } from 'flowise-embed-react' -import { useNavigate } from 'react-router-dom' - -// Project import -import LoginDialog from '@/ui-component/dialog/LoginDialog' // API import chatflowsApi from '@/api/chatflows' @@ -24,60 +20,22 @@ import { baseURL } from '@/store/constant' const ChatbotFull = () => { const URLpath = document.location.pathname.toString().split('/') const chatflowId = URLpath[URLpath.length - 1] === 'chatbot' ? 
'' : URLpath[URLpath.length - 1] - const navigate = useNavigate() const theme = useTheme() const [chatflow, setChatflow] = useState(null) const [chatbotTheme, setChatbotTheme] = useState({}) - const [loginDialogOpen, setLoginDialogOpen] = useState(false) - const [loginDialogProps, setLoginDialogProps] = useState({}) const [isLoading, setLoading] = useState(true) const [chatbotOverrideConfig, setChatbotOverrideConfig] = useState({}) const getSpecificChatflowFromPublicApi = useApi(chatflowsApi.getSpecificChatflowFromPublicEndpoint) const getSpecificChatflowApi = useApi(chatflowsApi.getSpecificChatflow) - const onLoginClick = (username, password) => { - localStorage.setItem('username', username) - localStorage.setItem('password', password) - navigate(0) - } - useEffect(() => { getSpecificChatflowFromPublicApi.request(chatflowId) // eslint-disable-next-line react-hooks/exhaustive-deps }, []) - useEffect(() => { - if (getSpecificChatflowFromPublicApi.error) { - if (getSpecificChatflowFromPublicApi.error?.response?.status === 401) { - if (localStorage.getItem('username') && localStorage.getItem('password')) { - getSpecificChatflowApi.request(chatflowId) - } else { - setLoginDialogProps({ - title: 'Login', - confirmButtonName: 'Login' - }) - setLoginDialogOpen(true) - } - } - } - // eslint-disable-next-line react-hooks/exhaustive-deps - }, [getSpecificChatflowFromPublicApi.error]) - - useEffect(() => { - if (getSpecificChatflowApi.error) { - if (getSpecificChatflowApi.error?.response?.status === 401) { - setLoginDialogProps({ - title: 'Login', - confirmButtonName: 'Login' - }) - setLoginDialogOpen(true) - } - } - }, [getSpecificChatflowApi.error]) - useEffect(() => { if (getSpecificChatflowFromPublicApi.data || getSpecificChatflowApi.data) { const chatflowData = getSpecificChatflowFromPublicApi.data || getSpecificChatflowApi.data @@ -151,7 +109,6 @@ const ChatbotFull = () => { theme={{ chatWindow: chatbotTheme }} /> )} - ) : null} diff --git a/packages/ui/src/views/chatflows/APICodeDialog.jsx b/packages/ui/src/views/chatflows/APICodeDialog.jsx index a7ae54d9d21..ee7ac222339 100644 --- a/packages/ui/src/views/chatflows/APICodeDialog.jsx +++ b/packages/ui/src/views/chatflows/APICodeDialog.jsx @@ -1,6 +1,6 @@ import { createPortal } from 'react-dom' import { useNavigate } from 'react-router-dom' -import { useState, useEffect } from 'react' +import { useState, useEffect, useMemo } from 'react' import { useDispatch, useSelector } from 'react-redux' import PropTypes from 'prop-types' @@ -21,11 +21,13 @@ import { import { CopyBlock, atomOneDark } from 'react-code-blocks' import ExpandMoreIcon from '@mui/icons-material/ExpandMore' import { useTheme } from '@mui/material/styles' +import { useAuth } from '@/hooks/useAuth' // Project import import { Dropdown } from '@/ui-component/dropdown/Dropdown' import ShareChatbot from './ShareChatbot' import EmbedChat from './EmbedChat' +import { Available } from '@/ui-component/rbac/available' // Const import { baseURL } from '@/store/constant' @@ -93,7 +95,6 @@ const APICodeDialog = ({ show, dialogProps, onCancel }) => { const codes = ['Embed', 'Python', 'JavaScript', 'cURL', 'Share Chatbot'] const [value, setValue] = useState(0) - const [keyOptions, setKeyOptions] = useState([]) const [apiKeys, setAPIKeys] = useState([]) const [chatflowApiKeyId, setChatflowApiKeyId] = useState('') const [selectedApiKey, setSelectedApiKey] = useState({}) @@ -108,6 +109,36 @@ const APICodeDialog = ({ show, dialogProps, onCancel }) => { const getIsChatflowStreamingApi = 
useApi(chatflowsApi.getIsChatflowStreaming) const getConfigApi = useApi(configApi.getConfig) const getAllVariablesApi = useApi(variablesApi.getAllVariables) + const isGlobal = useSelector((state) => state.auth.isGlobal) + const { hasPermission } = useAuth() + + // Memoize keyOptions to prevent recreation on hover + const keyOptions = useMemo(() => { + if (!getAllAPIKeysApi.data) return [] + + const options = [ + { + label: 'No Authorization', + name: '' + } + ] + + for (const key of getAllAPIKeysApi.data) { + options.push({ + label: key.keyName, + name: key.id + }) + } + + if (isGlobal || hasPermission('apikeys:create')) { + options.push({ + label: '- Add New Key -', + name: 'addnewkey' + }) + } + + return options + }, [getAllAPIKeysApi.data, isGlobal, hasPermission]) const onCheckBoxChanged = (newVal) => { setCheckbox(newVal) @@ -123,7 +154,8 @@ const APICodeDialog = ({ show, dialogProps, onCancel }) => { return } setChatflowApiKeyId(keyValue) - setSelectedApiKey(apiKeys.find((key) => key.id === keyValue)) + const selectedKey = apiKeys.find((key) => key.id === keyValue) + setSelectedApiKey(selectedKey || {}) const updateBody = { apikeyid: keyValue } @@ -628,23 +660,6 @@ formData.append("openAIApiKey[openAIEmbeddings_0]", "sk-my-openai-2nd-key")` useEffect(() => { if (getAllAPIKeysApi.data) { - const options = [ - { - label: 'No Authorization', - name: '' - } - ] - for (const key of getAllAPIKeysApi.data) { - options.push({ - label: key.keyName, - name: key.id - }) - } - options.push({ - label: '- Add New Key -', - name: 'addnewkey' - }) - setKeyOptions(options) setAPIKeys(getAllAPIKeysApi.data) if (dialogProps.chatflowApiKeyId) { @@ -693,13 +708,15 @@ formData.append("openAIApiKey[openAIEmbeddings_0]", "sk-my-openai-2nd-key")`
    - onApiKeySelected(newValue)} - value={dialogProps.chatflowApiKeyId ?? chatflowApiKeyId ?? 'Choose an API key'} - /> + + onApiKeySelected(newValue)} + value={dialogProps.chatflowApiKeyId ?? chatflowApiKeyId ?? 'Choose an API key'} + /> +
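The APICodeDialog change above replaces the keyOptions state (and the useEffect that rebuilt it) with a useMemo derivation, so the dropdown options are only recomputed when the fetched keys, the isGlobal flag, or the permission check change. A stripped-down sketch of the same pattern; the hook name below is invented for illustration, while the option shapes match the diff:

import { useMemo } from 'react'

// Derive dropdown options from fetched API keys instead of duplicating them in state.
// canCreateKeys stands in for the `isGlobal || hasPermission('apikeys:create')` check above.
export const useApiKeyOptions = (apiKeys, canCreateKeys) =>
    useMemo(() => {
        if (!apiKeys) return []
        const options = [{ label: 'No Authorization', name: '' }]
        for (const key of apiKeys) {
            options.push({ label: key.keyName, name: key.id })
        }
        if (canCreateKeys) {
            options.push({ label: '- Add New Key -', name: 'addnewkey' })
        }
        return options
    }, [apiKeys, canCreateKeys])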
    diff --git a/packages/ui/src/views/chatflows/ShareChatbot.jsx b/packages/ui/src/views/chatflows/ShareChatbot.jsx index 22b3ec1aa46..f0ca8e9b886 100644 --- a/packages/ui/src/views/chatflows/ShareChatbot.jsx +++ b/packages/ui/src/views/chatflows/ShareChatbot.jsx @@ -8,8 +8,9 @@ import { Card, Box, Typography, Button, Switch, OutlinedInput, Popover, Stack, I import { useTheme } from '@mui/material/styles' // Project import -import { StyledButton } from '@/ui-component/button/StyledButton' import { TooltipWithParser } from '@/ui-component/tooltip/TooltipWithParser' +import { Available } from '@/ui-component/rbac/available' +import { StyledPermissionButton } from '@/ui-component/button/RBACButtons' // Icons import { IconX, IconCopy, IconArrowUpRightCircle } from '@tabler/icons-react' @@ -444,20 +445,22 @@ const ShareChatbot = ({ isSessionMemory, isAgentCanvas }) => {
    -
    - { - setChatflowIsPublic(event.target.checked) - onSwitchChange(event.target.checked) - }} - /> - Make Public - -
    + +
    + { + setChatflowIsPublic(event.target.checked) + onSwitchChange(event.target.checked) + }} + /> + Make Public + +
    +
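Throughout this diff, controls are gated by RBAC helpers: plain buttons become StyledPermissionButton / PermissionIconButton, and blocks such as the Make Public switch above are wrapped in Available from '@/ui-component/rbac/available'. Those components are not defined in this diff; the sketch below is only a hypothetical minimal gate to illustrate the idea, and its `permission` prop name is an assumption:

import PropTypes from 'prop-types'
import { useAuth } from '@/hooks/useAuth'

// Hypothetical minimal permission gate: render children only when the current
// user holds the given permission string (e.g. 'chatflows:update').
const PermissionGate = ({ permission, children }) => {
    const { hasPermission } = useAuth()
    if (!hasPermission(permission)) return null
    return children
}

PermissionGate.propTypes = {
    permission: PropTypes.string.isRequired,
    children: PropTypes.node
}

export default PermissionGate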
    @@ -533,7 +536,8 @@ const ShareChatbot = ({ isSessionMemory, isAgentCanvas }) => { {colorField(textInputSendButtonColor, 'textInputSendButtonColor', 'TextIntput Send Button Color')} - { onClick={() => onSave()} > Save Changes - + { const theme = useTheme() const [isLoading, setLoading] = useState(true) - const [error, setError] = useState(null) const [images, setImages] = useState({}) const [search, setSearch] = useState('') - const [loginDialogOpen, setLoginDialogOpen] = useState(false) - const [loginDialogProps, setLoginDialogProps] = useState({}) + const { error, setError } = useError() const getAllChatflowsApi = useApi(chatflowsApi.getAllChatflows) const [view, setView] = useState(localStorage.getItem('flowDisplayStyle') || 'card') @@ -57,18 +55,12 @@ const Chatflows = () => { function filterFlows(data) { return ( - data.name.toLowerCase().indexOf(search.toLowerCase()) > -1 || + data?.name.toLowerCase().indexOf(search.toLowerCase()) > -1 || (data.category && data.category.toLowerCase().indexOf(search.toLowerCase()) > -1) || - data.id.toLowerCase().indexOf(search.toLowerCase()) > -1 + data?.id.toLowerCase().indexOf(search.toLowerCase()) > -1 ) } - const onLoginClick = (username, password) => { - localStorage.setItem('username', username) - localStorage.setItem('password', password) - navigate(0) - } - const addNew = () => { navigate('/canvas') } @@ -79,24 +71,9 @@ const Chatflows = () => { useEffect(() => { getAllChatflowsApi.request() - // eslint-disable-next-line react-hooks/exhaustive-deps }, []) - useEffect(() => { - if (getAllChatflowsApi.error) { - if (getAllChatflowsApi.error?.response?.status === 401) { - setLoginDialogProps({ - title: 'Login', - confirmButtonName: 'Login' - }) - setLoginDialogOpen(true) - } else { - setError(getAllChatflowsApi.error) - } - } - }, [getAllChatflowsApi.error]) - useEffect(() => { setLoading(getAllChatflowsApi.loading) }, [getAllChatflowsApi.loading]) @@ -112,9 +89,13 @@ const Chatflows = () => { const nodes = flowData.nodes || [] images[chatflows[i].id] = [] for (let j = 0; j < nodes.length; j += 1) { + if (nodes[j].data.name === 'stickyNote' || nodes[j].data.name === 'stickyNoteAgentflow') continue const imageSrc = `${baseURL}/api/v1/node-icon/${nodes[j].data.name}` - if (!images[chatflows[i].id].includes(imageSrc)) { - images[chatflows[i].id].push(imageSrc) + if (!images[chatflows[i].id].some((img) => img.imageSrc === imageSrc)) { + images[chatflows[i].id].push({ + imageSrc, + label: nodes[j].data.label + }) } } } @@ -170,9 +151,15 @@ const Chatflows = () => { - } sx={{ borderRadius: 2, height: 40 }}> + } + sx={{ borderRadius: 2, height: 40 }} + > Add New - + {!view || view === 'card' ? 
( <> @@ -184,9 +171,17 @@ const Chatflows = () => { ) : ( - {getAllChatflowsApi.data?.filter(filterFlows).map((data, index) => ( - goToCanvas(data)} data={data} images={images[data.id]} /> - ))} + {getAllChatflowsApi.data && + getAllChatflowsApi.data + ?.filter(filterFlows) + .map((data, index) => ( + goToCanvas(data)} + data={data} + images={images[data.id]} + /> + ))} )} @@ -214,8 +209,6 @@ const Chatflows = () => { )} )} - - ) diff --git a/packages/ui/src/views/chatmessage/ChatMessage.jsx b/packages/ui/src/views/chatmessage/ChatMessage.jsx index d8c4a66ea86..4ad20ec1e8c 100644 --- a/packages/ui/src/views/chatmessage/ChatMessage.jsx +++ b/packages/ui/src/views/chatmessage/ChatMessage.jsx @@ -963,8 +963,6 @@ const ChatMessage = ({ open, chatflowid, isAgentCanvas, isDialog, previews, setP const fetchResponseFromEventStream = async (chatflowid, params) => { const chatId = params.chatId const input = params.question - const username = localStorage.getItem('username') - const password = localStorage.getItem('password') params.streaming = true await fetchEventSource(`${baseURL}/api/v1/internal-prediction/${chatflowid}`, { openWhenHidden: true, @@ -972,7 +970,6 @@ const ChatMessage = ({ open, chatflowid, isAgentCanvas, isDialog, previews, setP body: JSON.stringify(params), headers: { 'Content-Type': 'application/json', - Authorization: username && password ? `Basic ${btoa(`${username}:${password}`)}` : undefined, 'x-request-from': 'internal' }, async onopen(response) { diff --git a/packages/ui/src/views/credentials/AddEditCredentialDialog.jsx b/packages/ui/src/views/credentials/AddEditCredentialDialog.jsx index 13cddd16222..abd025a6930 100644 --- a/packages/ui/src/views/credentials/AddEditCredentialDialog.jsx +++ b/packages/ui/src/views/credentials/AddEditCredentialDialog.jsx @@ -14,7 +14,7 @@ import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' import CredentialInputHandler from './CredentialInputHandler' // Icons -import { IconX } from '@tabler/icons-react' +import { IconHandStop, IconX } from '@tabler/icons-react' // API import credentialsApi from '@/api/credentials' @@ -50,17 +50,22 @@ const AddEditCredentialDialog = ({ show, dialogProps, onCancel, onConfirm, setEr const [name, setName] = useState('') const [credentialData, setCredentialData] = useState({}) const [componentCredential, setComponentCredential] = useState({}) + const [shared, setShared] = useState(false) useEffect(() => { if (getSpecificCredentialApi.data) { - setCredential(getSpecificCredentialApi.data) - if (getSpecificCredentialApi.data.name) { - setName(getSpecificCredentialApi.data.name) - } - if (getSpecificCredentialApi.data.plainDataObj) { - setCredentialData(getSpecificCredentialApi.data.plainDataObj) + const shared = getSpecificCredentialApi.data.shared + setShared(shared) + if (!shared) { + setCredential(getSpecificCredentialApi.data) + if (getSpecificCredentialApi.data.name) { + setName(getSpecificCredentialApi.data.name) + } + if (getSpecificCredentialApi.data.plainDataObj) { + setCredentialData(getSpecificCredentialApi.data.plainDataObj) + } + getSpecificComponentCredentialApi.request(getSpecificCredentialApi.data.credentialName) } - getSpecificComponentCredentialApi.request(getSpecificCredentialApi.data.credentialName) } // eslint-disable-next-line react-hooks/exhaustive-deps @@ -217,7 +222,7 @@ const AddEditCredentialDialog = ({ show, dialogProps, onCancel, onConfirm, setEr aria-describedby='alert-dialog-description' > - {componentCredential && componentCredential.label && ( + {!shared && 
componentCredential && componentCredential.label && (
    - {componentCredential && componentCredential.description && ( + {shared && ( +
    +
    + + Cannot edit shared credential. +
    +
    + )} + {!shared && componentCredential && componentCredential.description && (
    )} - {componentCredential && componentCredential.label && ( + {!shared && componentCredential && componentCredential.label && ( @@ -286,20 +315,23 @@ const AddEditCredentialDialog = ({ show, dialogProps, onCancel, onConfirm, setEr /> )} - {componentCredential && + {!shared && + componentCredential && componentCredential.inputs && componentCredential.inputs.map((inputParam, index) => ( ))} - (dialogProps.type === 'ADD' ? addNewCredential() : saveCredential())} - > - {dialogProps.confirmButtonName} - + {!shared && ( + (dialogProps.type === 'ADD' ? addNewCredential() : saveCredential())} + > + {dialogProps.confirmButtonName} + + )} diff --git a/packages/ui/src/views/credentials/index.jsx b/packages/ui/src/views/credentials/index.jsx index 266bbd47a87..7bd0fe1158d 100644 --- a/packages/ui/src/views/credentials/index.jsx +++ b/packages/ui/src/views/credentials/index.jsx @@ -18,16 +18,17 @@ import { TableHead, TableRow, Paper, - IconButton, useTheme } from '@mui/material' // project imports import MainCard from '@/ui-component/cards/MainCard' -import { StyledButton } from '@/ui-component/button/StyledButton' +import { PermissionIconButton, StyledPermissionButton } from '@/ui-component/button/RBACButtons' import CredentialListDialog from './CredentialListDialog' import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' import AddEditCredentialDialog from './AddEditCredentialDialog' +import ViewHeader from '@/layout/MainLayout/ViewHeader' +import ErrorBoundary from '@/ErrorBoundary' // API import credentialsApi from '@/api/credentials' @@ -40,15 +41,15 @@ import useConfirm from '@/hooks/useConfirm' import useNotifier from '@/utils/useNotifier' // Icons -import { IconTrash, IconEdit, IconX, IconPlus } from '@tabler/icons-react' +import { IconTrash, IconEdit, IconX, IconPlus, IconShare } from '@tabler/icons-react' import CredentialEmptySVG from '@/assets/images/credential_empty.svg' import keySVG from '@/assets/images/key.svg' // const import { baseURL } from '@/store/constant' import { SET_COMPONENT_CREDENTIALS } from '@/store/actions' -import ViewHeader from '@/layout/MainLayout/ViewHeader' -import ErrorBoundary from '@/ErrorBoundary' +import { useError } from '@/store/context/ErrorContext' +import ShareWithWorkspaceDialog from '@/ui-component/dialog/ShareWithWorkspaceDialog' const StyledTableCell = styled(TableCell)(({ theme }) => ({ borderColor: theme.palette.grey[900] + 25, @@ -77,12 +78,12 @@ const Credentials = () => { const customization = useSelector((state) => state.customization) const dispatch = useDispatch() useNotifier() + const { error, setError } = useError() const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) const [isLoading, setLoading] = useState(true) - const [error, setError] = useState(null) const [showCredentialListDialog, setShowCredentialListDialog] = useState(false) const [credentialListDialogProps, setCredentialListDialogProps] = useState({}) const [showSpecificCredentialDialog, setShowSpecificCredentialDialog] = useState(false) @@ -90,6 +91,9 @@ const Credentials = () => { const [credentials, setCredentials] = useState([]) const [componentsCredentials, setComponentsCredentials] = useState([]) + const [showShareCredentialDialog, setShowShareCredentialDialog] = useState(false) + const [shareCredentialDialogProps, setShareCredentialDialogProps] = useState({}) + const { confirm } = useConfirm() const getAllCredentialsApi = useApi(credentialsApi.getAllCredentials) @@ 
-134,6 +138,22 @@ const Credentials = () => { setShowSpecificCredentialDialog(true) } + const share = (credential) => { + const dialogProps = { + type: 'EDIT', + cancelButtonName: 'Cancel', + confirmButtonName: 'Share', + data: { + id: credential.id, + name: credential.name, + title: 'Share Credential', + itemType: 'credential' + } + } + setShareCredentialDialogProps(dialogProps) + setShowShareCredentialDialog(true) + } + const deleteCredential = async (credential) => { const confirmPayload = { title: `Delete`, @@ -177,7 +197,6 @@ const Credentials = () => { ) } }) - onCancel() } } } @@ -209,12 +228,6 @@ const Credentials = () => { } }, [getAllCredentialsApi.data]) - useEffect(() => { - if (getAllCredentialsApi.error) { - setError(getAllCredentialsApi.error) - } - }, [getAllCredentialsApi.error]) - useEffect(() => { if (getAllComponentsCredentialsApi.data) { setComponentsCredentials(getAllComponentsCredentialsApi.data) @@ -236,14 +249,15 @@ const Credentials = () => { title='Credentials' description='API keys, tokens, and secrets for 3rd party integrations' > - } > Add Credential - + {!isLoading && credentials.length <= 0 ? ( @@ -274,8 +288,9 @@ const Credentials = () => { Name Last Updated Created - - + + + @@ -297,6 +312,9 @@ const Credentials = () => { + + + @@ -314,6 +332,9 @@ const Credentials = () => { + + + ) : ( @@ -364,20 +385,46 @@ const Credentials = () => { {moment(credential.createdDate).format('MMMM Do, YYYY HH:mm:ss')} - - edit(credential)}> - - - - - deleteCredential(credential)} - > - - - + {!credential.shared && ( + <> + + share(credential)} + > + + + + + edit(credential)} + > + + + + + deleteCredential(credential)} + > + + + + + )} + {credential.shared && ( + <> + Shared Credential + + )} ))} @@ -395,13 +442,23 @@ const Credentials = () => { onCancel={() => setShowCredentialListDialog(false)} onCredentialSelected={onCredentialSelected} > - setShowSpecificCredentialDialog(false)} - onConfirm={onConfirm} - setError={setError} - > + {showSpecificCredentialDialog && ( + setShowSpecificCredentialDialog(false)} + onConfirm={onConfirm} + setError={setError} + > + )} + {showShareCredentialDialog && ( + setShowShareCredentialDialog(false)} + setError={setError} + > + )} ) diff --git a/packages/ui/src/views/datasets/AddEditDatasetDialog.jsx b/packages/ui/src/views/datasets/AddEditDatasetDialog.jsx new file mode 100644 index 00000000000..babb70eb5bb --- /dev/null +++ b/packages/ui/src/views/datasets/AddEditDatasetDialog.jsx @@ -0,0 +1,270 @@ +import { createPortal } from 'react-dom' +import PropTypes from 'prop-types' +import { useState, useEffect } from 'react' +import { useDispatch } from 'react-redux' +import { enqueueSnackbar as enqueueSnackbarAction, closeSnackbar as closeSnackbarAction } from '@/store/actions' + +// Material +import { Button, Dialog, DialogActions, DialogContent, DialogTitle, Box, Typography, OutlinedInput } from '@mui/material' + +// Project imports +import { StyledButton } from '@/ui-component/button/StyledButton' +import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' +import { File } from '@/ui-component/file/File' +import { SwitchInput } from '@/ui-component/switch/Switch' +import { TooltipWithParser } from '@/ui-component/tooltip/TooltipWithParser' + +// Icons +import { IconX, IconDatabase } from '@tabler/icons-react' + +// API +import datasetApi from '@/api/dataset' + +// Hooks + +// utils +import useNotifier from '@/utils/useNotifier' + +// const +import { HIDE_CANVAS_DIALOG, SHOW_CANVAS_DIALOG } from '@/store/actions' +const CSVFORMAT = 
`Only the first 2 columns will be considered: +---------------------------- +| Input | Output | +---------------------------- +| test input | test output | +---------------------------- +` + +const AddEditDatasetDialog = ({ show, dialogProps, onCancel, onConfirm }) => { + const portalElement = document.getElementById('portal') + + const dispatch = useDispatch() + + // ==============================|| Snackbar ||============================== // + + useNotifier() + + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const [datasetName, setDatasetName] = useState('') + const [datasetDescription, setDatasetDescription] = useState('') + const [dialogType, setDialogType] = useState('ADD') + const [dataset, setDataset] = useState({}) + const [firstRowHeaders, setFirstRowHeaders] = useState(false) + const [selectedFile, setSelectedFile] = useState() + + useEffect(() => { + if (dialogProps.type === 'EDIT' && dialogProps.data) { + setDatasetName(dialogProps.data.name) + setDatasetDescription(dialogProps.data.description) + setDialogType('EDIT') + setDataset(dialogProps.data) + } else if (dialogProps.type === 'ADD') { + setDatasetName('') + setDatasetDescription('') + setDialogType('ADD') + setDataset({}) + } + + return () => { + setDatasetName('') + setDatasetDescription('') + setDialogType('ADD') + setDataset({}) + } + }, [dialogProps]) + + useEffect(() => { + if (show) dispatch({ type: SHOW_CANVAS_DIALOG }) + else dispatch({ type: HIDE_CANVAS_DIALOG }) + return () => dispatch({ type: HIDE_CANVAS_DIALOG }) + }, [show, dispatch]) + + const addNewDataset = async () => { + try { + const obj = { + name: datasetName, + description: datasetDescription + } + if (selectedFile) { + obj.firstRowHeaders = firstRowHeaders + obj.csvFile = selectedFile + } + const createResp = await datasetApi.createDataset(obj) + if (createResp.data) { + enqueueSnackbar({ + message: 'New Dataset added', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + onConfirm(createResp.data.id) + } + } catch (error) { + enqueueSnackbar({ + message: `Failed to add new Dataset: ${ + typeof error.response.data === 'object' ? error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + onCancel() + } + } + + const saveDataset = async () => { + try { + const saveObj = { + name: datasetName, + description: datasetDescription + } + + const saveResp = await datasetApi.updateDataset(dataset.id, saveObj) + if (saveResp.data) { + enqueueSnackbar({ + message: 'Dataset saved', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + onConfirm(saveResp.data.id) + } + } catch (error) { + enqueueSnackbar({ + message: `Failed to save Dataset: ${ + typeof error.response.data === 'object' ? error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + onCancel() + } + } + + const component = show ? ( + + +
    + + {dialogProps.type === 'ADD' ? 'Add Dataset' : 'Edit Dataset'} +
    +
    + + +
    + + Name * + +
    +
    + setDatasetName(e.target.value)} + value={datasetName ?? ''} + /> +
    + +
    + Description +
    +
    + setDatasetDescription(e.target.value)} + value={datasetDescription ?? ''} + /> +
    + {dialogType === 'ADD' && ( + +
    + + Upload CSV + ${CSVFORMAT}`} /> + +
    +
    + setSelectedFile(newValue)} + value={selectedFile ?? 'Choose a file to upload'} + /> + +
    + )} +
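The CSV upload above follows the contract documented by CSVFORMAT: only the first two columns are used, and firstRowHeaders controls whether the first row is skipped. The sketch below is not the server's parser, just an illustration of that format (it splits naively on commas and ignores quoting):

// Illustration of the documented CSV contract: first column -> input,
// second column -> output, optional header row controlled by firstRowHeaders.
const parseDatasetCsv = (csvText, firstRowHeaders) => {
    const lines = csvText.split(/\r?\n/).filter((line) => line.trim() !== '')
    const rows = firstRowHeaders ? lines.slice(1) : lines
    return rows.map((line) => {
        const [input = '', output = ''] = line.split(',')
        return { input: input.trim(), output: output.trim() }
    })
}

// parseDatasetCsv('Input,Output\ntest input,test output', true)
// -> [{ input: 'test input', output: 'test output' }]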
    + + + (dialogType === 'ADD' ? addNewDataset() : saveDataset())} + > + {dialogProps.confirmButtonName} + + + +
    + ) : null + + return createPortal(component, portalElement) +} + +AddEditDatasetDialog.propTypes = { + show: PropTypes.bool, + dialogProps: PropTypes.object, + onCancel: PropTypes.func, + onConfirm: PropTypes.func +} + +export default AddEditDatasetDialog diff --git a/packages/ui/src/views/datasets/AddEditDatasetRowDialog.jsx b/packages/ui/src/views/datasets/AddEditDatasetRowDialog.jsx new file mode 100644 index 00000000000..920a33a6c25 --- /dev/null +++ b/packages/ui/src/views/datasets/AddEditDatasetRowDialog.jsx @@ -0,0 +1,244 @@ +import { createPortal } from 'react-dom' +import PropTypes from 'prop-types' +import { useState, useEffect } from 'react' +import { useDispatch } from 'react-redux' +import { enqueueSnackbar as enqueueSnackbarAction, closeSnackbar as closeSnackbarAction } from '@/store/actions' + +// Material +import { Button, Dialog, DialogActions, DialogContent, DialogTitle, Box, Typography, OutlinedInput } from '@mui/material' + +// Project imports +import { StyledPermissionButton } from '@/ui-component/button/RBACButtons' +import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' + +// Icons +import { IconX, IconDatabase } from '@tabler/icons-react' + +// API +import datasetApi from '@/api/dataset' + +// Hooks + +// utils +import useNotifier from '@/utils/useNotifier' + +// const +import { HIDE_CANVAS_DIALOG, SHOW_CANVAS_DIALOG } from '@/store/actions' + +const AddEditDatasetRowDialog = ({ show, dialogProps, onCancel, onConfirm }) => { + const portalElement = document.getElementById('portal') + + const dispatch = useDispatch() + + // ==============================|| Snackbar ||============================== // + + useNotifier() + + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const [datasetId, setDatasetId] = useState('') + const [datasetName, setDatasetName] = useState('') + const [input, setInput] = useState('') + const [output, setOutput] = useState('') + const [dialogType, setDialogType] = useState('ADD') + const [row, setRow] = useState({}) + + useEffect(() => { + if (dialogProps.type === 'EDIT' && dialogProps.data) { + setDatasetId(dialogProps.data.datasetId) + setDatasetName(dialogProps.data.datasetName) + setDialogType('EDIT') + setRow(dialogProps.data) + setInput(dialogProps.data.input) + setOutput(dialogProps.data.output) + } else if (dialogProps.type === 'ADD') { + setDatasetId(dialogProps.data.datasetId) + setDatasetName(dialogProps.data.datasetName) + setDialogType('ADD') + setRow({}) + } + + return () => { + setInput('') + setOutput('') + setDialogType('ADD') + setRow({}) + } + }, [dialogProps]) + + useEffect(() => { + if (show) dispatch({ type: SHOW_CANVAS_DIALOG }) + else dispatch({ type: HIDE_CANVAS_DIALOG }) + return () => dispatch({ type: HIDE_CANVAS_DIALOG }) + }, [show, dispatch]) + + const addNewDatasetRow = async () => { + try { + const obj = { + datasetId: datasetId, + input: input, + output: output + } + const createResp = await datasetApi.createDatasetRow(obj) + if (createResp.data) { + enqueueSnackbar({ + message: 'New Row added for the given Dataset', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + onConfirm(createResp.data.id) + } + } catch (error) { + enqueueSnackbar({ + message: `Failed to add new row in the Dataset: ${ + typeof error.response.data === 'object' ? 
error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + onCancel() + } + } + + const saveDatasetRow = async () => { + try { + const saveObj = { + input: input, + output: output + } + + const saveResp = await datasetApi.updateDatasetRow(row.id, saveObj) + if (saveResp.data) { + enqueueSnackbar({ + message: 'Dataset Row saved', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + onConfirm(saveResp.data.id) + } + } catch (error) { + enqueueSnackbar({ + message: `Failed to save Dataset Row: ${ + typeof error.response.data === 'object' ? error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + onCancel() + } + } + + const component = show ? ( + + +
    + + {dialogProps.type === 'ADD' ? `Add Item to ${datasetName} Dataset` : `Edit Item in ${datasetName} Dataset`} +
    +
    + + +
    + + Input * + + +
    +
    + setInput(e.target.value)} + value={input ?? ''} + /> +
    + +
    + + Anticipated Output * + + +
    +
    + setOutput(e.target.value)} + value={output ?? ''} + /> +
    +
    + + + (dialogType === 'ADD' ? addNewDatasetRow() : saveDatasetRow())} + > + {dialogProps.confirmButtonName} + + + +
    + ) : null + + return createPortal(component, portalElement) +} + +AddEditDatasetRowDialog.propTypes = { + show: PropTypes.bool, + dialogProps: PropTypes.object, + onCancel: PropTypes.func, + onConfirm: PropTypes.func +} + +export default AddEditDatasetRowDialog diff --git a/packages/ui/src/views/datasets/DatasetItems.jsx b/packages/ui/src/views/datasets/DatasetItems.jsx new file mode 100644 index 00000000000..bff236aff68 --- /dev/null +++ b/packages/ui/src/views/datasets/DatasetItems.jsx @@ -0,0 +1,487 @@ +import { useEffect, useRef, useState } from 'react' +import React from 'react' +import { useDispatch, useSelector } from 'react-redux' + +// material-ui +import { + Checkbox, + Skeleton, + Box, + TableRow, + TableContainer, + Paper, + Table, + TableHead, + TableBody, + Button, + Stack, + Typography +} from '@mui/material' +import { useTheme } from '@mui/material/styles' + +// project imports +import MainCard from '@/ui-component/cards/MainCard' +import { StyledTableCell, StyledTableRow } from '@/ui-component/table/TableStyles' +import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' +import AddEditDatasetRowDialog from './AddEditDatasetRowDialog' +import UploadCSVFileDialog from '@/views/datasets/UploadCSVFileDialog' +import ErrorBoundary from '@/ErrorBoundary' +import ViewHeader from '@/layout/MainLayout/ViewHeader' +import { PermissionButton, StyledPermissionButton } from '@/ui-component/button/RBACButtons' +import AddEditDatasetDialog from '@/views/datasets/AddEditDatasetDialog' + +// API +import datasetsApi from '@/api/dataset' + +// Hooks +import useApi from '@/hooks/useApi' +import { closeSnackbar as closeSnackbarAction, enqueueSnackbar as enqueueSnackbarAction } from '@/store/actions' +import useNotifier from '@/utils/useNotifier' +import useConfirm from '@/hooks/useConfirm' +import { useAuth } from '@/hooks/useAuth' + +// icons +import empty_datasetSVG from '@/assets/images/empty_datasets.svg' +import { IconTrash, IconPlus, IconX, IconUpload, IconArrowsDownUp } from '@tabler/icons-react' +import DragIndicatorIcon from '@mui/icons-material/DragIndicator' + +import { useError } from '@/store/context/ErrorContext' + +// ==============================|| Dataset Items ||============================== // + +const EvalDatasetRows = () => { + const theme = useTheme() + const customization = useSelector((state) => state.customization) + const dispatch = useDispatch() + useNotifier() + const { error } = useError() + + const [showRowDialog, setShowRowDialog] = useState(false) + const [showUploadDialog, setShowUploadDialog] = useState(false) + const [rowDialogProps, setRowDialogProps] = useState({}) + const [showDatasetDialog, setShowDatasetDialog] = useState(false) + const [datasetDialogProps, setDatasetDialogProps] = useState({}) + + const [dataset, setDataset] = useState([]) + const [isLoading, setLoading] = useState(true) + const [selected, setSelected] = useState([]) + + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const { confirm } = useConfirm() + + const getDatasetRows = useApi(datasetsApi.getDataset) + const reorderDatasetRowApi = useApi(datasetsApi.reorderDatasetRow) + + const URLpath = document.location.pathname.toString().split('/') + const datasetId = URLpath[URLpath.length - 1] === 'dataset_rows' ? 
'' : URLpath[URLpath.length - 1] + + const { hasPermission } = useAuth() + + const draggingItem = useRef() + const dragOverItem = useRef() + const [Draggable, setDraggable] = useState(false) + const [startDragPos, setStartDragPos] = useState(-1) + const [endDragPos, setEndDragPos] = useState(-1) + + const handleDragStart = (e, position) => { + draggingItem.current = position + setStartDragPos(position) + setEndDragPos(-1) + } + const handleDragEnter = (e, position) => { + setEndDragPos(position) + dragOverItem.current = position + } + + const handleDragEnd = (e, position) => { + dragOverItem.current = position + const updatedDataset = { ...dataset } + updatedDataset.rows.splice(endDragPos, 0, dataset.rows.splice(startDragPos, 1)[0]) + setDataset({ ...updatedDataset }) + e.preventDefault() + const updatedRows = [] + + dataset.rows.map((item, index) => { + updatedRows.push({ + id: item.id, + sequenceNo: index + }) + }) + reorderDatasetRowApi.request({ datasetId: datasetId, rows: updatedRows }) + } + + const onSelectAllClick = (event) => { + if (event.target.checked) { + const newSelected = (dataset?.rows || []).map((n) => n.id) + setSelected(newSelected) + return + } + setSelected([]) + } + + const handleSelect = (event, id) => { + const selectedIndex = selected.indexOf(id) + let newSelected = [] + + if (selectedIndex === -1) { + newSelected = newSelected.concat(selected, id) + } else if (selectedIndex === 0) { + newSelected = newSelected.concat(selected.slice(1)) + } else if (selectedIndex === selected.length - 1) { + newSelected = newSelected.concat(selected.slice(0, -1)) + } else if (selectedIndex > 0) { + newSelected = newSelected.concat(selected.slice(0, selectedIndex), selected.slice(selectedIndex + 1)) + } + setSelected(newSelected) + } + + const addNew = () => { + const dialogProp = { + type: 'ADD', + cancelButtonName: 'Cancel', + confirmButtonName: 'Add', + data: { + datasetId: datasetId, + datasetName: dataset.name + } + } + setRowDialogProps(dialogProp) + setShowRowDialog(true) + } + + const uploadCSV = () => { + const dialogProp = { + type: 'ADD', + cancelButtonName: 'Cancel', + confirmButtonName: 'Upload', + data: { + datasetId: datasetId, + datasetName: dataset.name + } + } + setRowDialogProps(dialogProp) + setShowUploadDialog(true) + } + + const editDs = () => { + const dialogProp = { + type: 'EDIT', + cancelButtonName: 'Cancel', + confirmButtonName: 'Save', + data: dataset + } + setDatasetDialogProps(dialogProp) + setShowDatasetDialog(true) + } + + const edit = (item) => { + const dialogProp = { + type: 'EDIT', + cancelButtonName: 'Cancel', + confirmButtonName: 'Save', + data: { + datasetName: dataset.name, + ...item + } + } + setRowDialogProps(dialogProp) + setShowRowDialog(true) + } + + const deleteDatasetItems = async () => { + const confirmPayload = { + title: `Delete`, + description: `Delete ${selected.length} dataset items?`, + confirmButtonName: 'Delete', + cancelButtonName: 'Cancel' + } + const isConfirmed = await confirm(confirmPayload) + + if (isConfirmed) { + try { + const deleteResp = await datasetsApi.deleteDatasetItems(selected) + if (deleteResp.data) { + enqueueSnackbar({ + message: 'Dataset Items deleted', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + onConfirm() + } + } catch (error) { + enqueueSnackbar({ + message: `Failed to delete dataset items: ${ + typeof error.response.data === 'object' ? 
error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } + setSelected([]) + } + } + + const onConfirm = () => { + setShowRowDialog(false) + setShowUploadDialog(false) + setShowDatasetDialog(false) + getDatasetRows.request(datasetId) + } + + useEffect(() => { + getDatasetRows.request(datasetId) + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []) + + useEffect(() => { + if (getDatasetRows.data) { + const dataset = getDatasetRows.data + setDataset(dataset) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getDatasetRows.data]) + + useEffect(() => { + setLoading(getDatasetRows.loading) + }, [getDatasetRows.loading]) + + return ( + <> + + {error ? ( + + ) : ( + + window.history.back()} + search={false} + title={`Dataset : ${dataset?.name || ''}`} + description={dataset?.description} + > + } + > + Upload CSV + + } + > + New Item + + + {selected.length > 0 && ( + } + > + Delete {selected.length} {selected.length === 1 ? 'item' : 'items'} + + )} + {!isLoading && dataset?.rows?.length <= 0 ? ( + + + empty_datasetSVG + +
    No Dataset Items Yet
    + } + onClick={addNew} + > + New Item + +
    + ) : ( + + + + + + + + + Input + Expected Output + + + + + + + {isLoading ? ( + <> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ) : ( + <> + {(dataset?.rows || []).map((item, index) => ( + handleDragStart(e, index)} + onDragOver={(e) => e.preventDefault()} + onDragEnter={(e) => handleDragEnter(e, index)} + onDragEnd={(e) => handleDragEnd(e, index)} + hover + key={index} + sx={{ cursor: 'pointer', '&:last-child td, &:last-child th': { border: 0 } }} + > + setDraggable(false)} + onMouseUp={() => setDraggable(true)} + > + handleSelect(event, item.id)} + inputProps={{ + 'aria-labelledby': item.id + }} + /> + + edit(item)} + onMouseDown={() => setDraggable(false)} + onMouseUp={() => setDraggable(true)} + > + {item.input} + + edit(item)} + onMouseDown={() => setDraggable(false)} + onMouseUp={() => setDraggable(true)} + > + {item.output} + + + setDraggable(true)} + onMouseUp={() => setDraggable(false)} + /> + + + ))} + + )} + +
    +
    + + Use the drag icon (at the far right) to reorder the dataset items + 
    + )} +
    + )} +
    + setShowRowDialog(false)} + onConfirm={onConfirm} + > + {showUploadDialog && ( + setShowUploadDialog(false)} + onConfirm={onConfirm} + > + )} + {showDatasetDialog && ( + setShowDatasetDialog(false)} + onConfirm={onConfirm} + > + )} + + + ) +} + +export default EvalDatasetRows diff --git a/packages/ui/src/views/datasets/UploadCSVFileDialog.jsx b/packages/ui/src/views/datasets/UploadCSVFileDialog.jsx new file mode 100644 index 00000000000..355a0176725 --- /dev/null +++ b/packages/ui/src/views/datasets/UploadCSVFileDialog.jsx @@ -0,0 +1,196 @@ +import { createPortal } from 'react-dom' +import PropTypes from 'prop-types' +import { useState, useEffect } from 'react' +import { useDispatch } from 'react-redux' +import { enqueueSnackbar as enqueueSnackbarAction, closeSnackbar as closeSnackbarAction } from '@/store/actions' + +// Material +import { Button, Dialog, DialogActions, DialogContent, DialogTitle, Box, Typography } from '@mui/material' + +// Project imports +import { StyledButton } from '@/ui-component/button/StyledButton' +import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' +import { SwitchInput } from '@/ui-component/switch/Switch' +import { File } from '@/ui-component/file/File' +import { TooltipWithParser } from '@/ui-component/tooltip/TooltipWithParser' + +// Icons +import { IconX, IconDatabase } from '@tabler/icons-react' + +// API +import datasetApi from '@/api/dataset' + +// utils +import useNotifier from '@/utils/useNotifier' + +// const +import { HIDE_CANVAS_DIALOG, SHOW_CANVAS_DIALOG } from '@/store/actions' +const CSVFORMAT = `Only the first 2 columns will be considered: +---------------------------- +| Input | Output | +---------------------------- +| test input | test output | +---------------------------- +` + +const UploadCSVFileDialog = ({ show, dialogProps, onCancel, onConfirm }) => { + const portalElement = document.getElementById('portal') + + const dispatch = useDispatch() + + // ==============================|| Snackbar ||============================== // + + useNotifier() + + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const [datasetId, setDatasetId] = useState('') + const [datasetName, setDatasetName] = useState('') + const [firstRowHeaders, setFirstRowHeaders] = useState(false) + const [selectedFile, setSelectedFile] = useState() + const [dialogType, setDialogType] = useState('ADD') + + useEffect(() => { + setDatasetId(dialogProps.data.datasetId) + setDatasetName(dialogProps.data.datasetName) + setDialogType('ADD') + + return () => { + setDialogType('ADD') + setDatasetId('') + setDatasetName('') + setFirstRowHeaders(false) + setSelectedFile() + } + }, [dialogProps]) + + useEffect(() => { + if (show) dispatch({ type: SHOW_CANVAS_DIALOG }) + else dispatch({ type: HIDE_CANVAS_DIALOG }) + return () => dispatch({ type: HIDE_CANVAS_DIALOG }) + }, [show, dispatch]) + + const addNewDatasetRow = async () => { + try { + const obj = { + datasetId: datasetId, + firstRowHeaders: firstRowHeaders, + csvFile: selectedFile + } + const createResp = await datasetApi.createDatasetRow(obj) + if (createResp.data) { + enqueueSnackbar({ + message: 'New Row added for the given Dataset', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + onConfirm(createResp.data.id) + } + } catch (error) { + enqueueSnackbar({ + message: `Failed to add new row in the Dataset: ${ + typeof error.response.data === 
'object' ? error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + onCancel() + } + } + + const component = show ? ( + + +
    +
    + +
    + {'Upload Items to [' + datasetName + '] Dataset'} +
    +
    + + +
    + + Upload CSV + ${CSVFORMAT}`} /> + +
    +
    + setSelectedFile(newValue)} + value={selectedFile ?? 'Choose a file to upload'} + /> + +
    +
    + + + addNewDatasetRow()} + > + {dialogProps.confirmButtonName} + + + 
    + ) : null + + return createPortal(component, portalElement) +} + +UploadCSVFileDialog.propTypes = { + show: PropTypes.bool, + dialogProps: PropTypes.object, + onCancel: PropTypes.func, + onConfirm: PropTypes.func +} + +export default UploadCSVFileDialog diff --git a/packages/ui/src/views/datasets/index.jsx b/packages/ui/src/views/datasets/index.jsx new file mode 100644 index 00000000000..8dd69b976c1 --- /dev/null +++ b/packages/ui/src/views/datasets/index.jsx @@ -0,0 +1,339 @@ +import { useEffect, useState } from 'react' +import { useDispatch, useSelector } from 'react-redux' +import moment from 'moment/moment' +import { useNavigate } from 'react-router-dom' + +// material-ui +import { + Skeleton, + Box, + Stack, + TableContainer, + Paper, + Table, + TableHead, + TableRow, + TableCell, + TableBody, + IconButton, + Button +} from '@mui/material' +import { useTheme } from '@mui/material/styles' + +// project imports +import MainCard from '@/ui-component/cards/MainCard' +import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' +import AddEditDatasetDialog from './AddEditDatasetDialog' +import ViewHeader from '@/layout/MainLayout/ViewHeader' +import ErrorBoundary from '@/ErrorBoundary' +import { StyledTableCell, StyledTableRow } from '@/ui-component/table/TableStyles' +import { StyledPermissionButton } from '@/ui-component/button/RBACButtons' +import { Available } from '@/ui-component/rbac/available' + +// API +import { closeSnackbar as closeSnackbarAction, enqueueSnackbar as enqueueSnackbarAction } from '@/store/actions' +import useConfirm from '@/hooks/useConfirm' +import datasetsApi from '@/api/dataset' + +// Hooks +import useApi from '@/hooks/useApi' +import useNotifier from '@/utils/useNotifier' + +// icons +import empty_datasetSVG from '@/assets/images/empty_datasets.svg' +import { IconTrash, IconEdit, IconPlus, IconX } from '@tabler/icons-react' + +// Utils +import { truncateString } from '@/utils/genericHelper' + +import { useError } from '@/store/context/ErrorContext' + +// ==============================|| Datasets ||============================== // + +const EvalDatasets = () => { + const navigate = useNavigate() + const theme = useTheme() + const { confirm } = useConfirm() + const { error } = useError() + + const customization = useSelector((state) => state.customization) + + useNotifier() + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const [search, setSearch] = useState('') + const dispatch = useDispatch() + const [isLoading, setLoading] = useState(true) + const [datasets, setDatasets] = useState([]) + const [showDatasetDialog, setShowDatasetDialog] = useState(false) + const [datasetDialogProps, setDatasetDialogProps] = useState({}) + const getAllDatasets = useApi(datasetsApi.getAllDatasets) + + const goToRows = (selectedDataset) => { + navigate(`/dataset_rows/${selectedDataset.id}`) + } + + const onSearchChange = (event) => { + setSearch(event.target.value) + } + + const addNew = () => { + const dialogProp = { + type: 'ADD', + cancelButtonName: 'Cancel', + confirmButtonName: 'Add', + data: {} + } + setDatasetDialogProps(dialogProp) + setShowDatasetDialog(true) + } + + const edit = (dataset) => { + const dialogProp = { + type: 'EDIT', + cancelButtonName: 'Cancel', + confirmButtonName: 'Save', + data: dataset + } + setDatasetDialogProps(dialogProp) + setShowDatasetDialog(true) + } + + const deleteDataset = async (dataset) => { + const confirmPayload = { + 
title: `Delete`, + description: `Delete dataset ${dataset.name}?`, + confirmButtonName: 'Delete', + cancelButtonName: 'Cancel' + } + const isConfirmed = await confirm(confirmPayload) + + if (isConfirmed) { + try { + const deleteResp = await datasetsApi.deleteDataset(dataset.id) + if (deleteResp.data) { + enqueueSnackbar({ + message: 'Dataset deleted', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + onConfirm() + } + } catch (error) { + enqueueSnackbar({ + message: `Failed to delete dataset: ${ + typeof error.response.data === 'object' ? error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } + } + } + + const onConfirm = () => { + setShowDatasetDialog(false) + getAllDatasets.request() + } + + function filterDatasets(data) { + return data.name.toLowerCase().indexOf(search.toLowerCase()) > -1 + } + + useEffect(() => { + getAllDatasets.request() + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []) + + useEffect(() => { + if (getAllDatasets.data) { + setDatasets(getAllDatasets.data) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getAllDatasets.data]) + + useEffect(() => { + setLoading(getAllDatasets.loading) + }, [getAllDatasets.loading]) + + return ( + <> + + {error ? ( + + ) : ( + + + } + > + Add New + + + {!isLoading && datasets.length <= 0 ? ( + + + empty_datasetSVG + +
    No Datasets Yet
    +
    + ) : ( + + + + + Name + Description + Rows + Last Updated + + + + + + + + + + {isLoading ? ( + <> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ) : ( + <> + {datasets.filter(filterDatasets).map((ds, index) => ( + + goToRows(ds)} component='th' scope='row'> + {ds.name} + + goToRows(ds)} + style={{ wordWrap: 'break-word', flexWrap: 'wrap', width: '40%' }} + > + {truncateString(ds?.description, 200)} + + goToRows(ds)}>{ds?.rowCount} + goToRows(ds)}> + {moment(ds.updatedDate).format('MMMM Do YYYY, hh:mm A')} + + + + edit(ds)}> + + + + + + + deleteDataset(ds)}> + + + + + + ))} + + )} + +
    +
    + )} +
    + )} +
    + setShowDatasetDialog(false)} + onConfirm={onConfirm} + > + + + ) +} + +export default EvalDatasets diff --git a/packages/ui/src/views/docstore/AddDocStoreDialog.jsx b/packages/ui/src/views/docstore/AddDocStoreDialog.jsx index f77eb958805..b6087184acd 100644 --- a/packages/ui/src/views/docstore/AddDocStoreDialog.jsx +++ b/packages/ui/src/views/docstore/AddDocStoreDialog.jsx @@ -87,10 +87,11 @@ const AddDocStoreDialog = ({ show, dialogProps, onCancel, onConfirm }) => { }) onConfirm(createResp.data.id) } - } catch (err) { - const errorData = typeof err === 'string' ? err : err.response?.data || `${err.response.data.message}` + } catch (error) { enqueueSnackbar({ - message: `Failed to add new Document Store: ${errorData}`, + message: `Failed to add new Document Store: ${ + typeof error.response.data === 'object' ? error.response.data.message : error.response.data + }`, options: { key: new Date().getTime() + Math.random(), variant: 'error', @@ -130,9 +131,10 @@ const AddDocStoreDialog = ({ show, dialogProps, onCancel, onConfirm }) => { onConfirm(saveResp.data.id) } } catch (error) { - const errorData = error.response?.data || `${error.response?.status}: ${error.response?.statusText}` enqueueSnackbar({ - message: `Failed to update Document Store: ${errorData}`, + message: `Failed to update Document Store: ${ + typeof error.response.data === 'object' ? error.response.data.message : error.response.data + }`, options: { key: new Date().getTime() + Math.random(), variant: 'error', diff --git a/packages/ui/src/views/docstore/DocStoreAPIDialog.jsx b/packages/ui/src/views/docstore/DocStoreAPIDialog.jsx index 0807838a044..129d71b5222 100644 --- a/packages/ui/src/views/docstore/DocStoreAPIDialog.jsx +++ b/packages/ui/src/views/docstore/DocStoreAPIDialog.jsx @@ -8,6 +8,7 @@ import documentstoreApi from '@/api/documentstore' import useApi from '@/hooks/useApi' import { useTheme } from '@mui/material/styles' import ExpandMoreIcon from '@mui/icons-material/ExpandMore' +import { baseURL } from '@/store/constant' const DocStoreAPIDialog = ({ show, dialogProps, onCancel }) => { const [nodeConfig, setNodeConfig] = useState({}) @@ -24,7 +25,7 @@ const DocStoreAPIDialog = ({ show, dialogProps, onCancel }) => { import requests import json -API_URL = "http://localhost:3000/api/v1/document-store/upsert/${dialogProps.storeId}" +API_URL = "${baseURL}/api/v1/document-store/upsert/${dialogProps.storeId}" API_KEY = "your_api_key_here" # use form data to upload files @@ -79,7 +80,7 @@ formData.append("createNewDocStore", "false"); async function query(formData) { const response = await fetch( - "http://localhost:3000/api/v1/document-store/upsert/${dialogProps.storeId}", + "${baseURL}/api/v1/document-store/upsert/${dialogProps.storeId}", { method: "POST", headers: { @@ -98,7 +99,7 @@ query(formData).then((response) => { \`\`\` \`\`\`bash -curl -X POST http://localhost:3000/api/v1/document-store/upsert/${dialogProps.storeId} \\ +curl -X POST ${baseURL}/api/v1/document-store/upsert/${dialogProps.storeId} \\ -H "Authorization: Bearer " \\ -F "files=@" \\ -F "docId=${dialogProps.loaderId}" \\ @@ -122,7 +123,7 @@ curl -X POST http://localhost:3000/api/v1/document-store/upsert/${dialogProps.st \`\`\`python import requests -API_URL = "http://localhost:3000/api/v1/document-store/upsert/${dialogProps.storeId}" +API_URL = "${baseURL}/api/v1/document-store/upsert/${dialogProps.storeId}" API_KEY = "your_api_key_here" headers = { @@ -160,7 +161,7 @@ print(output) \`\`\`javascript async function query(data) { const response = await 
fetch( - "http://localhost:3000/api/v1/document-store/upsert/${dialogProps.storeId}", + "${baseURL}/api/v1/document-store/upsert/${dialogProps.storeId}", { method: "POST", headers: { @@ -200,7 +201,7 @@ query({ \`\`\` \`\`\`bash -curl -X POST http://localhost:3000/api/v1/document-store/upsert/${dialogProps.storeId} \\ +curl -X POST ${baseURL}/api/v1/document-store/upsert/${dialogProps.storeId} \\ -H "Content-Type: application/json" \\ -H "Authorization: Bearer " \\ -d '{ diff --git a/packages/ui/src/views/docstore/DocStoreInputHandler.jsx b/packages/ui/src/views/docstore/DocStoreInputHandler.jsx index 7c6b3e52152..b621e3fdf15 100644 --- a/packages/ui/src/views/docstore/DocStoreInputHandler.jsx +++ b/packages/ui/src/views/docstore/DocStoreInputHandler.jsx @@ -4,7 +4,7 @@ import { useSelector } from 'react-redux' // material-ui import { Box, Typography, IconButton, Button } from '@mui/material' -import { IconRefresh, IconArrowsMaximize, IconAlertTriangle } from '@tabler/icons-react' +import { IconArrowsMaximize, IconAlertTriangle, IconRefresh } from '@tabler/icons-react' // project import import { Dropdown } from '@/ui-component/dropdown/Dropdown' diff --git a/packages/ui/src/views/docstore/DocumentStoreDetail.jsx b/packages/ui/src/views/docstore/DocumentStoreDetail.jsx index 457e2cf4c7d..7661cfcab0e 100644 --- a/packages/ui/src/views/docstore/DocumentStoreDetail.jsx +++ b/packages/ui/src/views/docstore/DocumentStoreDetail.jsx @@ -20,8 +20,7 @@ import { MenuItem, Divider, Button, - Skeleton, - IconButton + Skeleton } from '@mui/material' import { alpha, styled, useTheme } from '@mui/material/styles' import { tableCellClasses } from '@mui/material/TableCell' @@ -35,6 +34,8 @@ import ErrorBoundary from '@/ErrorBoundary' import { StyledButton } from '@/ui-component/button/StyledButton' import ViewHeader from '@/layout/MainLayout/ViewHeader' import DeleteDocStoreDialog from './DeleteDocStoreDialog' +import { Available } from '@/ui-component/rbac/available' +import { PermissionIconButton, StyledPermissionButton } from '@/ui-component/button/RBACButtons' import DocumentStoreStatus from '@/views/docstore/DocumentStoreStatus' import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' import DocStoreAPIDialog from './DocStoreAPIDialog' @@ -45,6 +46,7 @@ import documentsApi from '@/api/documentstore' // Hooks import useApi from '@/hooks/useApi' import useNotifier from '@/utils/useNotifier' +import { useAuth } from '@/hooks/useAuth' import { getFileName } from '@/utils/genericHelper' import useConfirm from '@/hooks/useConfirm' @@ -62,6 +64,7 @@ import doc_store_details_emptySVG from '@/assets/images/doc_store_details_empty. 
// store import { closeSnackbar as closeSnackbarAction, enqueueSnackbar as enqueueSnackbarAction } from '@/store/actions' +import { useError } from '@/store/context/ErrorContext' // ==============================|| DOCUMENTS ||============================== // @@ -126,15 +129,17 @@ const DocumentStoreDetails = () => { const customization = useSelector((state) => state.customization) const navigate = useNavigate() const dispatch = useDispatch() + const { hasAssignedWorkspace } = useAuth() useNotifier() const { confirm } = useConfirm() const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + const { error, setError } = useError() + const { hasPermission } = useAuth() const getSpecificDocumentStore = useApi(documentsApi.getSpecificDocumentStore) - const [error, setError] = useState(null) const [isLoading, setLoading] = useState(true) const [isBackdropLoading, setBackdropLoading] = useState(false) const [showDialog, setShowDialog] = useState(false) @@ -400,20 +405,17 @@ const DocumentStoreDetails = () => { useEffect(() => { if (getSpecificDocumentStore.data) { + const workspaceId = getSpecificDocumentStore.data.workspaceId + if (!hasAssignedWorkspace(workspaceId)) { + navigate('/unauthorized') + return + } setDocumentStore(getSpecificDocumentStore.data) - // total the chunks and chars } // eslint-disable-next-line react-hooks/exhaustive-deps }, [getSpecificDocumentStore.data]) - useEffect(() => { - if (getSpecificDocumentStore.error) { - setError(getSpecificDocumentStore.error) - } - // eslint-disable-next-line react-hooks/exhaustive-deps - }, [getSpecificDocumentStore.error]) - useEffect(() => { setLoading(getSpecificDocumentStore.loading) }, [getSpecificDocumentStore.loading]) @@ -427,7 +429,7 @@ const DocumentStoreDetails = () => { { onEdit={() => onEditClicked()} > {(documentStore?.status === 'STALE' || documentStore?.status === 'UPSERTING') && ( - + - + )} - } onClick={listLoaders} > Add Document Loader - + - - - - Preview & Process - - - - View & Edit Chunks - - - - Upsert Chunks - - - - View API - - - - - Delete - - -
    - + + +
    + + + + + + Preview & Process + + + + + + View & Edit Chunks + + + + + + Upsert Chunks + + + + + + View API + + + + + + + Delete + + + +
    +
    +
    ) diff --git a/packages/ui/src/views/docstore/ExpandedChunkDialog.jsx b/packages/ui/src/views/docstore/ExpandedChunkDialog.jsx index a0b85b10fca..0d7976e00a2 100644 --- a/packages/ui/src/views/docstore/ExpandedChunkDialog.jsx +++ b/packages/ui/src/views/docstore/ExpandedChunkDialog.jsx @@ -11,6 +11,7 @@ import { IconEdit, IconTrash, IconX, IconLanguage } from '@tabler/icons-react' // Project imports import { CodeEditor } from '@/ui-component/editor/CodeEditor' +import { PermissionButton, PermissionIconButton } from '@/ui-component/button/RBACButtons' const ExpandedChunkDialog = ({ show, dialogProps, onCancel, onChunkEdit, onDeleteChunk, isReadOnly }) => { const portalElement = document.getElementById('portal') @@ -87,9 +88,16 @@ const ExpandedChunkDialog = ({ show, dialogProps, onCancel, onChunkEdit, onDelet #{selectedChunkNumber}. {selectedChunk.id} {!isEdit && !isReadOnly && ( - setIsEdit(true)} size='small' color='primary' title='Edit Chunk' sx={{ ml: 2 }}> + setIsEdit(true)} + size='small' + color='primary' + title='Edit Chunk' + sx={{ ml: 2 }} + > - + )} {isEdit && !isReadOnly && ( )} {isEdit && !isReadOnly && ( - + )} {!isEdit && !isReadOnly && ( - onDeleteChunk(selectedChunk)} size='small' color='error' @@ -116,7 +126,7 @@ const ExpandedChunkDialog = ({ show, dialogProps, onCancel, onChunkEdit, onDelet sx={{ ml: 1 }} > - + )} diff --git a/packages/ui/src/views/docstore/LoaderConfigPreviewChunks.jsx b/packages/ui/src/views/docstore/LoaderConfigPreviewChunks.jsx index 4554f393a98..d23a3f359cf 100644 --- a/packages/ui/src/views/docstore/LoaderConfigPreviewChunks.jsx +++ b/packages/ui/src/views/docstore/LoaderConfigPreviewChunks.jsx @@ -7,6 +7,7 @@ import ReactJson from 'flowise-react-json-view' // Hooks import useApi from '@/hooks/useApi' +import { useAuth } from '@/hooks/useAuth' // Material-UI import { Skeleton, Toolbar, Box, Button, Card, CardContent, Grid, OutlinedInput, Stack, Typography, TextField } from '@mui/material' @@ -31,6 +32,7 @@ import documentsApi from '@/api/documentstore' // Const import { baseURL, gridSpacing } from '@/store/constant' import { closeSnackbar as closeSnackbarAction, enqueueSnackbar as enqueueSnackbarAction } from '@/store/actions' +import { useError } from '@/store/context/ErrorContext' // Utils import { initNode } from '@/utils/genericHelper' @@ -61,6 +63,8 @@ const LoaderConfigPreviewChunks = () => { const customization = useSelector((state) => state.customization) const navigate = useNavigate() const theme = useTheme() + const { error } = useError() + const { hasAssignedWorkspace } = useAuth() const getNodeDetailsApi = useApi(nodesApi.getSpecificNode) const getNodesByCategoryApi = useApi(nodesApi.getNodesByCategory) @@ -71,7 +75,6 @@ const LoaderConfigPreviewChunks = () => { const [selectedDocumentLoader, setSelectedDocumentLoader] = useState({}) const [loading, setLoading] = useState(false) - const [error, setError] = useState(null) const [loaderName, setLoaderName] = useState('') const [textSplitterNodes, setTextSplitterNodes] = useState([]) @@ -117,21 +120,27 @@ const LoaderConfigPreviewChunks = () => { const checkMandatoryFields = () => { let canSubmit = true + const missingFields = [] const inputParams = (selectedDocumentLoader.inputParams ?? 
[]).filter((inputParam) => !inputParam.hidden) for (const inputParam of inputParams) { if (!inputParam.optional && (!selectedDocumentLoader.inputs[inputParam.name] || !selectedDocumentLoader.credential)) { - if (inputParam.type === 'credential' && !selectedDocumentLoader.credential) { + if ( + inputParam.type === 'credential' && + !selectedDocumentLoader.credential && + !selectedDocumentLoader.inputs['FLOWISE_CREDENTIAL_ID'] + ) { canSubmit = false - break + missingFields.push(inputParam.label || inputParam.name) } else if (inputParam.type !== 'credential' && !selectedDocumentLoader.inputs[inputParam.name]) { canSubmit = false - break + missingFields.push(inputParam.label || inputParam.name) } } } if (!canSubmit) { + const fieldsList = missingFields.join(', ') enqueueSnackbar({ - message: 'Please fill in all mandatory fields.', + message: `Please fill in the following mandatory fields: ${fieldsList}`, options: { key: new Date().getTime() + Math.random(), variant: 'warning', @@ -335,6 +344,11 @@ const LoaderConfigPreviewChunks = () => { useEffect(() => { if (getSpecificDocumentStoreApi.data) { + const workspaceId = getSpecificDocumentStoreApi.data.workspaceId + if (!hasAssignedWorkspace(workspaceId)) { + navigate('/unauthorized') + return + } if (getSpecificDocumentStoreApi.data?.loaders.length > 0) { const loader = getSpecificDocumentStoreApi.data.loaders.find((loader) => loader.id === docLoaderNodeName) if (loader) { @@ -347,30 +361,6 @@ const LoaderConfigPreviewChunks = () => { // eslint-disable-next-line react-hooks/exhaustive-deps }, [getSpecificDocumentStoreApi.data]) - useEffect(() => { - if (getSpecificDocumentStoreApi.error) { - setError(getSpecificDocumentStoreApi.error) - } - - // eslint-disable-next-line react-hooks/exhaustive-deps - }, [getSpecificDocumentStoreApi.error]) - - useEffect(() => { - if (getNodeDetailsApi.error) { - setError(getNodeDetailsApi.error) - } - - // eslint-disable-next-line react-hooks/exhaustive-deps - }, [getNodeDetailsApi.error]) - - useEffect(() => { - if (getNodesByCategoryApi.error) { - setError(getNodesByCategoryApi.error) - } - - // eslint-disable-next-line react-hooks/exhaustive-deps - }, [getNodesByCategoryApi.error]) - return ( <> diff --git a/packages/ui/src/views/docstore/ShowStoredChunks.jsx b/packages/ui/src/views/docstore/ShowStoredChunks.jsx index 07d477e8ac5..e02ecadb429 100644 --- a/packages/ui/src/views/docstore/ShowStoredChunks.jsx +++ b/packages/ui/src/views/docstore/ShowStoredChunks.jsx @@ -16,6 +16,7 @@ import { BackdropLoader } from '@/ui-component/loading/BackdropLoader' import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' import ExpandedChunkDialog from './ExpandedChunkDialog' import ViewHeader from '@/layout/MainLayout/ViewHeader' +import ErrorBoundary from '@/ErrorBoundary' // API import documentsApi from '@/api/documentstore' @@ -24,9 +25,11 @@ import documentsApi from '@/api/documentstore' import useApi from '@/hooks/useApi' import useConfirm from '@/hooks/useConfirm' import useNotifier from '@/utils/useNotifier' +import { useAuth } from '@/hooks/useAuth' // store import { closeSnackbar as closeSnackbarAction, enqueueSnackbar as enqueueSnackbarAction } from '@/store/actions' +import { useError } from '@/store/context/ErrorContext' const CardWrapper = styled(MainCard)(({ theme }) => ({ background: theme.palette.card.main, @@ -53,6 +56,8 @@ const ShowStoredChunks = () => { const dispatch = useDispatch() const theme = useTheme() const { confirm } = useConfirm() + const { error } = useError() + const { 
hasAssignedWorkspace } = useAuth() useNotifier() const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) @@ -196,6 +201,11 @@ const ShowStoredChunks = () => { useEffect(() => { if (getChunksApi.data) { const data = getChunksApi.data + const workspaceId = data.workspaceId + if (!hasAssignedWorkspace(workspaceId)) { + navigate('/unauthorized') + return + } setTotalChunks(data.count) setDocumentChunks(data.chunks) setLoading(false) @@ -217,156 +227,160 @@ const ShowStoredChunks = () => { return ( <> - - navigate(-1)} - > -
    - {fileNames.length > 0 && ( - - {fileNames.map((fileName, index) => ( + {error ? ( + + ) : ( + + navigate(-1)} + > +
    + {fileNames.length > 0 && ( + + {fileNames.map((fileName, index) => ( +
    + {fileName} +
    + ))} +
    + )} +
    +
    + changePage(currentPage - 1)} + style={{ marginRight: 10 }} + variant='outlined' + disabled={currentPage === 1} + > + + + Showing {Math.min(start, totalChunks)}-{end} of {totalChunks} chunks + changePage(currentPage + 1)} + style={{ marginLeft: 10 }} + variant='outlined' + disabled={end >= totalChunks} + > + = totalChunks + ? '#616161' + : 'white' + : end >= totalChunks + ? '#e0e0e0' + : 'black' + } + /> + +
    +
    + + {getChunksApi.data?.characters?.toLocaleString()} characters +
    +
    +
    +
    + + {!documentChunks.length && (
    - {fileName} + + chunks_emptySVG + +
    No Chunks
    - ))} + )} + {documentChunks.length > 0 && + documentChunks.map((row, index) => ( + + chunkSelected(row.id)} + sx={{ border: 1, borderColor: theme.palette.grey[900] + 25, borderRadius: 2 }} + > + + + + {`#${row.chunkNo}. Characters: ${row.pageContent.length}`} + + + {row.pageContent} + + + + + + + ))}
    - )} -
    -
    - changePage(currentPage - 1)} - style={{ marginRight: 10 }} - variant='outlined' - disabled={currentPage === 1} - > - - - Showing {Math.min(start, totalChunks)}-{end} of {totalChunks} chunks - changePage(currentPage + 1)} - style={{ marginLeft: 10 }} - variant='outlined' - disabled={end >= totalChunks} - > - = totalChunks - ? '#616161' - : 'white' - : end >= totalChunks - ? '#e0e0e0' - : 'black' - } - /> - -
    -
    - - {getChunksApi.data?.characters?.toLocaleString()} characters -
    -
    -
    - - {!documentChunks.length && ( -
    - - chunks_emptySVG - -
    No Chunks
    -
    - )} - {documentChunks.length > 0 && - documentChunks.map((row, index) => ( - - chunkSelected(row.id)} - sx={{ border: 1, borderColor: theme.palette.grey[900] + 25, borderRadius: 2 }} - > - - - - {`#${row.chunkNo}. Characters: ${row.pageContent.length}`} - - - {row.pageContent} - - - - - - - ))} -
    -
    -
    + + )} { const navigate = useNavigate() const dispatch = useDispatch() + const { hasAssignedWorkspace } = useAuth() useNotifier() + const { error, setError } = useError() const customization = useSelector((state) => state.customization) const { storeId, docId } = useParams() @@ -62,9 +66,7 @@ const VectorStoreConfigure = () => { const getVectorStoreNodeDetailsApi = useApi(nodesApi.getSpecificNode) const getRecordManagerNodeDetailsApi = useApi(nodesApi.getSpecificNode) - const [error, setError] = useState(null) const [loading, setLoading] = useState(true) - const [documentStore, setDocumentStore] = useState({}) const [dialogProps, setDialogProps] = useState({}) @@ -377,6 +379,10 @@ const VectorStoreConfigure = () => { useEffect(() => { if (getSpecificDocumentStoreApi.data) { const docStore = getSpecificDocumentStoreApi.data + if (!hasAssignedWorkspace(docStore.workspaceId)) { + navigate('/unauthorized') + return + } setDocumentStore(docStore) if (docStore.embeddingConfig) { getEmbeddingNodeDetailsApi.request(docStore.embeddingConfig.name) diff --git a/packages/ui/src/views/docstore/VectorStoreQuery.jsx b/packages/ui/src/views/docstore/VectorStoreQuery.jsx index 100683912d8..6e0fd1b0ead 100644 --- a/packages/ui/src/views/docstore/VectorStoreQuery.jsx +++ b/packages/ui/src/views/docstore/VectorStoreQuery.jsx @@ -20,6 +20,7 @@ import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' import ExpandedChunkDialog from './ExpandedChunkDialog' import ViewHeader from '@/layout/MainLayout/ViewHeader' import DocStoreInputHandler from '@/views/docstore/DocStoreInputHandler' +import { PermissionButton } from '@/ui-component/button/RBACButtons' // API import documentsApi from '@/api/documentstore' @@ -27,6 +28,7 @@ import nodesApi from '@/api/nodes' // Hooks import useApi from '@/hooks/useApi' +import { useAuth } from '@/hooks/useAuth' import useNotifier from '@/utils/useNotifier' import { baseURL } from '@/store/constant' import { initNode } from '@/utils/genericHelper' @@ -57,6 +59,7 @@ const VectorStoreQuery = () => { const theme = useTheme() const dispatch = useDispatch() const inputRef = useRef(null) + const { hasAssignedWorkspace } = useAuth() useNotifier() @@ -227,6 +230,10 @@ const VectorStoreQuery = () => { useEffect(() => { if (getSpecificDocumentStoreApi.data) { + if (!hasAssignedWorkspace(getSpecificDocumentStoreApi.data.workspaceId)) { + navigate('/unauthorized') + return + } setDocumentStore(getSpecificDocumentStoreApi.data) const vectorStoreConfig = getSpecificDocumentStoreApi.data.vectorStoreConfig if (vectorStoreConfig) { @@ -249,7 +256,8 @@ const VectorStoreQuery = () => { description='Retrieval Playground - Test your vector store retrieval settings' onBack={() => navigate(-1)} > - +
    diff --git a/packages/ui/src/views/docstore/index.jsx b/packages/ui/src/views/docstore/index.jsx index b399af873ca..afa8e875cc1 100644 --- a/packages/ui/src/views/docstore/index.jsx +++ b/packages/ui/src/views/docstore/index.jsx @@ -23,11 +23,11 @@ import { useTheme } from '@mui/material/styles' // project imports import MainCard from '@/ui-component/cards/MainCard' import DocumentStoreCard from '@/ui-component/cards/DocumentStoreCard' -import { StyledButton } from '@/ui-component/button/StyledButton' import AddDocStoreDialog from '@/views/docstore/AddDocStoreDialog' import ErrorBoundary from '@/ErrorBoundary' import ViewHeader from '@/layout/MainLayout/ViewHeader' import DocumentStoreStatus from '@/views/docstore/DocumentStoreStatus' +import { StyledPermissionButton } from '@/ui-component/button/RBACButtons' // API import useApi from '@/hooks/useApi' @@ -39,6 +39,7 @@ import doc_store_empty from '@/assets/images/doc_store_empty.svg' // const import { baseURL, gridSpacing } from '@/store/constant' +import { useError } from '@/store/context/ErrorContext' // ==============================|| DOCUMENTS ||============================== // @@ -48,8 +49,8 @@ const Documents = () => { const navigate = useNavigate() const getAllDocumentStores = useApi(documentsApi.getAllDocumentStores) + const { error } = useError() - const [error, setError] = useState(null) const [isLoading, setLoading] = useState(true) const [images, setImages] = useState({}) const [search, setSearch] = useState('') @@ -135,10 +136,6 @@ const Documents = () => { setLoading(getAllDocumentStores.loading) }, [getAllDocumentStores.loading]) - useEffect(() => { - setError(getAllDocumentStores.error) - }, [getAllDocumentStores.error]) - return ( {error ? ( @@ -184,7 +181,8 @@ const Documents = () => { - { id='btn_createVariable' > Add New - + {!view || view === 'card' ? ( <> diff --git a/packages/ui/src/views/evaluations/ChartLatency.jsx b/packages/ui/src/views/evaluations/ChartLatency.jsx new file mode 100644 index 00000000000..762ac533ac2 --- /dev/null +++ b/packages/ui/src/views/evaluations/ChartLatency.jsx @@ -0,0 +1,59 @@ +import { CartesianGrid, Line, LineChart, ResponsiveContainer, XAxis, YAxis, Tooltip } from 'recharts' +import PropTypes from 'prop-types' + +const empty = [] + +const COLORS = ['#00C49F', '#0088FE', '#82ca9d', '#113333', '#FF3322'] + +export const ChartLatency = ({ data, flowNames, onClick }) => { + return ( + + + + + + + {flowNames.map((key, index) => ( + + ))} + + + ) +} + +ChartLatency.propTypes = { + data: PropTypes.array, + flowNames: PropTypes.array, + onClick: PropTypes.func +} diff --git a/packages/ui/src/views/evaluations/ChartPassPrnt.jsx b/packages/ui/src/views/evaluations/ChartPassPrnt.jsx new file mode 100644 index 00000000000..88c097ecc4f --- /dev/null +++ b/packages/ui/src/views/evaluations/ChartPassPrnt.jsx @@ -0,0 +1,37 @@ +import { ResponsiveContainer, PieChart, Pie, Cell, Legend } from 'recharts' +import PropTypes from 'prop-types' + +// success, failure, error +const COLORS = ['#2ecc71', '#e74c3c', '#f39c12'] +const RADIAN = Math.PI / 180 + +const renderCustomizedLabel = ({ cx, cy, midAngle, innerRadius, outerRadius, percent }) => { + const radius = innerRadius + (outerRadius - innerRadius) * 0.35 + const x = cx + radius * Math.cos(-midAngle * RADIAN) + const y = cy + radius * Math.sin(-midAngle * RADIAN) + + return ( + cx ? 
'start' : 'end'} dominantBaseline='central' fontSize='11'> + {`${(percent * 100).toFixed(2)}%`} + + ) +} + +export const ChartPassPrnt = ({ data }) => { + return ( + + + + + + + + + + + ) +} + +ChartPassPrnt.propTypes = { + data: PropTypes.array +} diff --git a/packages/ui/src/views/evaluations/ChartTokens.jsx b/packages/ui/src/views/evaluations/ChartTokens.jsx new file mode 100644 index 00000000000..ce8e091039b --- /dev/null +++ b/packages/ui/src/views/evaluations/ChartTokens.jsx @@ -0,0 +1,57 @@ +import { CartesianGrid, ResponsiveContainer, XAxis, YAxis, Tooltip, Bar, BarChart } from 'recharts' +import PropTypes from 'prop-types' + +export const ChartTokens = ({ data, flowNames }) => { + return ( + + + + + + + {flowNames.map((name, index) => ( + <> + + + + ))} + + + ) +} + +ChartTokens.propTypes = { + data: PropTypes.array, + flowNames: PropTypes.array +} diff --git a/packages/ui/src/views/evaluations/CreateEvaluationDialog.jsx b/packages/ui/src/views/evaluations/CreateEvaluationDialog.jsx new file mode 100644 index 00000000000..a18453989c5 --- /dev/null +++ b/packages/ui/src/views/evaluations/CreateEvaluationDialog.jsx @@ -0,0 +1,637 @@ +import { createPortal } from 'react-dom' +import PropTypes from 'prop-types' +import { useState, useEffect } from 'react' + +// Material +import { + Dialog, + DialogActions, + DialogContent, + DialogTitle, + Box, + Typography, + Chip, + OutlinedInput, + Divider, + Stack, + DialogContentText, + Button, + Stepper, + Step, + Switch, + StepLabel, + IconButton, + FormControlLabel +} from '@mui/material' +import { useTheme } from '@mui/material/styles' + +// Project imports +import { StyledButton } from '@/ui-component/button/StyledButton' +import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' +import { Dropdown } from '@/ui-component/dropdown/Dropdown' +import CredentialInputHandler from '@/views/canvas/CredentialInputHandler' +import { TooltipWithParser } from '@/ui-component/tooltip/TooltipWithParser' +import { MultiDropdown } from '@/ui-component/dropdown/MultiDropdown' + +// Icons +import { IconArrowLeft, IconAlertTriangle, IconTestPipe2 } from '@tabler/icons-react' + +// API +import chatflowsApi from '@/api/chatflows' +import useApi from '@/hooks/useApi' +import datasetsApi from '@/api/dataset' +import evaluatorsApi from '@/api/evaluators' +import nodesApi from '@/api/nodes' + +// utils +import useNotifier from '@/utils/useNotifier' + +// const +import { evaluators as evaluatorsOptions } from '../evaluators/evaluatorConstant' + +const steps = ['Datasets', 'Evaluators', 'LLM Graded Metrics'] + +const CreateEvaluationDialog = ({ show, dialogProps, onCancel, onConfirm }) => { + const portalElement = document.getElementById('portal') + const theme = useTheme() + useNotifier() + + const getAllChatflowsApi = useApi(chatflowsApi.getAllChatflows) + const getAllDatasetsApi = useApi(datasetsApi.getAllDatasets) + const getAllEvaluatorsApi = useApi(evaluatorsApi.getAllEvaluators) + const getNodesByCategoryApi = useApi(nodesApi.getNodesByCategory) + const getModelsApi = useApi(nodesApi.executeNodeLoadMethod) + + const [chatflow, setChatflow] = useState([]) + const [dataset, setDataset] = useState('') + const [datasetAsOneConversation, setDatasetAsOneConversation] = useState(false) + + const [flows, setFlows] = useState([]) + const [datasets, setDatasets] = useState([]) + const [credentialId, setCredentialId] = useState('') + const [evaluationName, setEvaluationName] = useState('') + const [availableSimpleEvaluators, setAvailableSimpleEvaluators] = 
useState([]) + const [availableLLMEvaluators, setAvailableLLMEvaluators] = useState([]) + const [selectedSimpleEvaluators, setSelectedSimpleEvaluators] = useState([]) + const [selectedLLMEvaluators, setSelectedLLMEvaluators] = useState([]) + + const [activeStep, setActiveStep] = useState(0) + const [useLLM, setUseLLM] = useState(false) + + const [validationFailed, setValidationFailed] = useState(false) + + const [chatLLMs, setChatLLMs] = useState([]) + const [selectedLLM, setSelectedLLM] = useState('no_grading') + const [availableModels, setAvailableModels] = useState([]) + const [selectedModel, setSelectedModel] = useState('') + + useEffect(() => { + if (dialogProps.type === 'NEW' && dialogProps.data) { + const evaluation = dialogProps.data + const evalChatFlows = [] + JSON.parse(evaluation.chatflowId).map((f) => { + evalChatFlows.push(f) + }) + setChatflow(evalChatFlows) + setDataset(evaluation.datasetId) + setCredentialId('') + setSelectedModel('') + setSelectedLLM('no_grading') + setEvaluationName('') + setSelectedSimpleEvaluators([]) + setSelectedLLMEvaluators([]) + setActiveStep(0) + setUseLLM(false) + setCredentialId('') + } else { + resetData() + } + + return () => { + resetData() + } + }, [dialogProps]) + + const resetData = () => { + setDataset('') + setCredentialId('') + setEvaluationName('') + setSelectedSimpleEvaluators([]) + setSelectedLLMEvaluators([]) + setActiveStep(0) + setChatflow([]) + setSelectedModel('') + setSelectedLLM('no_grading') + setUseLLM(false) + setDatasetAsOneConversation(false) + } + + const validate = () => { + if (activeStep === 0) { + return evaluationName && dataset && chatflow.length > 0 + } else if (activeStep === 1) { + return true + } else if (activeStep === 2) { + if (useLLM) { + return credentialId && selectedLLM && selectedModel + } else { + return true + } + } + return false + } + + const goNext = async (prevActiveStep) => { + const isValid = validate() + setValidationFailed(!isValid) + if (isValid) { + if (prevActiveStep === steps.length - 1) { + createNewEvaluation() + } else { + setActiveStep((prevActiveStep) => prevActiveStep + 1) + } + } + } + + const goPrev = async () => { + setActiveStep((prevActiveStep) => prevActiveStep - 1) + } + + const createNewEvaluation = async () => { + const selectedChatflows = JSON.parse(chatflow) + const selectedChatflowNames = [] + for (let i = 0; i < selectedChatflows.length; i += 1) { + selectedChatflowNames.push(flows.find((f) => f.name === selectedChatflows[i])?.label) + } + const chatflowName = JSON.stringify(selectedChatflowNames) + const datasetName = datasets.find((f) => f.name === dataset)?.label + const obj = { + name: evaluationName, + evaluationType: credentialId ? 
'llm' : 'benchmarking', + credentialId: credentialId, + datasetId: dataset, + datasetName: datasetName, + chatflowId: chatflow, + chatflowName: chatflowName, + selectedSimpleEvaluators: selectedSimpleEvaluators, + selectedLLMEvaluators: selectedLLMEvaluators, + model: selectedModel, + llm: selectedLLM, + datasetAsOneConversation: datasetAsOneConversation + } + onConfirm(obj) + } + + const disableButton = () => { + if (activeStep === 0) { + return !evaluationName || !dataset || chatflow.length === 0 + } else if (activeStep === 2) { + if (useLLM) { + if (!selectedModel || !selectedLLM || selectedLLMEvaluators.length === 0) { + return true + } + if (chatLLMs.find((llm) => llm.name === selectedLLM)?.credential && !credentialId) { + return true + } + } + return false + } + } + + const EvalWizard = () => { + return ( + + + {steps.map((label) => ( + + {label} + + ))} + + + ) + } + + useEffect(() => { + getNodesByCategoryApi.request('Chat Models') + if (flows.length === 0) { + getAllChatflowsApi.request() + } + if (datasets.length === 0) { + getAllDatasetsApi.request() + } + getAllEvaluatorsApi.request() + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []) + + useEffect(() => { + if (getAllChatflowsApi.data) { + try { + const chatflows = getAllChatflowsApi.data + let flowNames = [] + for (let i = 0; i < chatflows.length; i += 1) { + const flow = chatflows[i] + flowNames.push({ + label: flow.name, + name: flow.id + }) + } + setFlows(flowNames) + } catch (e) { + console.error(e) + } + } + }, [getAllChatflowsApi.data]) + + useEffect(() => { + if (getNodesByCategoryApi.data) { + const llmNodes = [] + try { + const nodes = getNodesByCategoryApi.data + llmNodes.push({ + label: 'No Grading', + name: 'no_grading', + credential: {} + }) + for (let i = 0; i < nodes.length; i += 1) { + const node = nodes[i] + if (!node.tags || !node.tags.indexOf('[LlamaIndex]') === -1) { + llmNodes.push({ + label: node.label, + name: node.name, + credential: node.credential + }) + } + } + setChatLLMs(llmNodes) + setSelectedLLM('no_grading') + setSelectedModel('') + setCredentialId('') + } catch (e) { + console.error(e) + } + } + }, [getNodesByCategoryApi.data]) + + useEffect(() => { + if (getModelsApi.data) { + try { + const models = getModelsApi.data + setAvailableModels(models) + } catch (e) { + console.error(e) + } + } + }, [getModelsApi.data]) + + useEffect(() => { + if (getAllEvaluatorsApi.data) { + try { + const simpleEvaluators = [] + const llmEvaluators = [] + // iterate over the evaluators and add a new property label that is the name of the evaluator + // also set the name to the id + for (let i = 0; i < getAllEvaluatorsApi.data.length; i += 1) { + const evaluator = getAllEvaluatorsApi.data[i] + evaluator.label = evaluator.name + evaluator.name = evaluator.id + if (evaluator.type === 'llm') { + llmEvaluators.push(evaluator) + } else { + simpleEvaluators.push(evaluator) + } + } + setAvailableSimpleEvaluators(simpleEvaluators) + setAvailableLLMEvaluators(llmEvaluators) + } catch (e) { + console.error(e) + } + } + }, [getAllEvaluatorsApi.data]) + + useEffect(() => { + if (getAllDatasetsApi.data) { + try { + const datasets = getAllDatasetsApi.data + let dsNames = [] + for (let i = 0; i < datasets.length; i += 1) { + const ds = datasets[i] + dsNames.push({ + label: ds.name, + name: ds.id + }) + } + setDatasets(dsNames) + } catch (e) { + console.error(e) + } + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getAllDatasetsApi.data]) + + const selectLLMForEval = (llm) => { + setUseLLM(llm 
!== 'no_grading') + setSelectedLLM(llm) + setSelectedModel('') + setCredentialId('') + if (llm !== 'no_grading') getModelsApi.request(llm, { loadMethod: 'listModels' }) + } + + const component = show ? ( + + +
    + + {'Start New Evaluation'} +
    +
    + + + + {validationFailed && ( +
    +
    + +
    + Please fill in all the mandatory fields + 
    + )} + + + {activeStep === 0 && ( + <> + + Select dataset to be tested on flows + + + Uses the input column from the dataset to execute selected + Chatflow(s), and compares the results with the output column. + + The following metrics will be computed: + + {evaluatorsOptions + .filter((opt) => opt.type === 'numeric' && opt.name !== 'chain') + .map((evaluator, index) => ( + + ))} + + + )} + {activeStep === 1 && ( + <> + + Unit Test your flows by adding custom evaluators + + + Post execution, all the chosen evaluators will be executed on the results. Each evaluator will grade the + results based on the criteria defined and return a pass/fail indicator. + + + + + )} + {activeStep === 2 && ( + <> + + Grade flows using an LLM + + + Post execution, grades the answers by using an LLM. Used to generate comparative scores or reasoning or + other custom defined criteria. + + + )} + + {activeStep === 0 && ( + <> + + + Name * + + + setEvaluationName(e.target.value)} + /> + + + + Dataset to use * + + setDataset(newValue)} + value={dataset} + /> + + + + Treat all dataset rows as one conversation ? + + } + value={datasetAsOneConversation} + onChange={() => setDatasetAsOneConversation(!datasetAsOneConversation)} + /> + + + + Chatflow(s) to Evaluate * + + setChatflow(newValue)} + value={chatflow ?? chatflow ?? 'choose an option'} + /> + + + )} + {activeStep === 1 && ( + <> + + Select the Evaluators + setSelectedSimpleEvaluators(newValue)} + value={selectedSimpleEvaluators} + /> + + + )} + {activeStep === 2 && ( + <> + + + Use an LLM to grade the results ? + + selectLLMForEval(newValue)} + /> + + {useLLM && availableModels.length > 0 && ( + + Select Model + setSelectedModel(newValue)} + /> + + )} + {useLLM && availableModels.length === 0 && ( + + Enter the Model Name + setSelectedModel(e.target.value)} + /> + + )} + {useLLM && chatLLMs.find((llm) => llm.name === selectedLLM)?.credential && ( + + Select Credential + llm.name === selectedLLM)?.credential.credentialNames[0] + ] + }} + onSelect={(newValue) => { + setCredentialId(newValue) + }} + /> + + )} + {useLLM && ( + + Select Evaluators + setSelectedLLMEvaluators(newValue)} + value={selectedLLMEvaluators} + /> + + )} + + )} + +
    +
    + + {activeStep > 0 && ( + goPrev(activeStep)}> + + + )} +
    + {activeStep === 1 && selectedSimpleEvaluators.length === 0 && ( + + )} + {activeStep === 1 && selectedSimpleEvaluators.length > 0 && ( + + )} + {activeStep !== 1 && ( + goNext(activeStep)} + > + {activeStep === steps.length - 1 ? 'Start Evaluation' : 'Next'} + + )} +
    + +
    + ) : null + + return createPortal(component, portalElement) +} + +CreateEvaluationDialog.propTypes = { + show: PropTypes.bool, + dialogProps: PropTypes.object, + onCancel: PropTypes.func, + onConfirm: PropTypes.func +} + +export default CreateEvaluationDialog diff --git a/packages/ui/src/views/evaluations/EvalsResultDialog.jsx b/packages/ui/src/views/evaluations/EvalsResultDialog.jsx new file mode 100644 index 00000000000..3fb013f0de4 --- /dev/null +++ b/packages/ui/src/views/evaluations/EvalsResultDialog.jsx @@ -0,0 +1,441 @@ +import React from 'react' +import { createPortal } from 'react-dom' +import PropTypes from 'prop-types' +import { useSelector } from 'react-redux' +import { useNavigate } from 'react-router-dom' + +// Material +import { + Stack, + Chip, + TableContainer, + Table, + TableHead, + TableBody, + TableRow, + Dialog, + DialogContent, + DialogTitle, + Paper, + Button, + TableCell +} from '@mui/material' +import { useTheme } from '@mui/material/styles' +import { IconVectorBezier2, IconMinimize } from '@tabler/icons-react' +import LLMIcon from '@mui/icons-material/ModelTraining' +import AlarmIcon from '@mui/icons-material/AlarmOn' +import TokensIcon from '@mui/icons-material/AutoAwesomeMotion' +import PaidIcon from '@mui/icons-material/Paid' + +// Project imports +import { StyledTableCell, StyledTableRow } from '@/ui-component/table/TableStyles' + +// const + +const EvalsResultDialog = ({ show, dialogProps, onCancel, openDetailsDrawer }) => { + const portalElement = document.getElementById('portal') + const customization = useSelector((state) => state.customization) + const theme = useTheme() + const navigate = useNavigate() + + const getColSpan = (evaluationsShown, llmEvaluations) => { + let colSpan = 1 + if (evaluationsShown) colSpan++ + if (llmEvaluations) colSpan++ + return colSpan + } + + const component = show ? ( + + + + {dialogProps.data && dialogProps.data.evaluation.chatflowName?.length > 0 && ( + +
    + + Chatflows Used: +
    + {(dialogProps.data.evaluation.chatflowName || []).map((chatflowUsed, index) => ( + navigate('/canvas/' + dialogProps.data.evaluation.chatflowId[index])} + > + ))} +
    + )} + +
    +
    + + + + + +   + Input + Expected Output + {dialogProps.data && + dialogProps.data.evaluation.chatflowId?.map((chatflowId, index) => ( + + + {dialogProps.data.evaluation.chatflowName[index]} + {dialogProps.data.rows.length > 0 && dialogProps.data.rows[0].metrics[index].model && ( + } + color={'info'} + size='small' + label={ + dialogProps.data.rows[0].metrics[index].model + + (dialogProps.data.rows[0].metrics[index].provider + ? ' [' + dialogProps.data.rows[0].metrics[index].provider + ']' + : '') + } + sx={{ ml: 2 }} + /> + )} + + + ))} + + + {dialogProps.data && + dialogProps.data.evaluation.chatflowId?.map((chatflowId, index) => ( + + + Actual Output + + {dialogProps.data.customEvalsDefined && dialogProps.data.showCustomEvals && ( + Evaluator + )} + {dialogProps.data.evaluation?.evaluationType === 'llm' && LLM Evaluation} + + ))} + + + + <> + {dialogProps.data && + dialogProps.data.rows.length > 0 && + dialogProps.data.rows.map((item, index) => ( + openDetailsDrawer(item)} + hover + key={index} + sx={{ cursor: 'pointer', '&:last-child td, &:last-child th': { border: 0 } }} + > + {index + 1} + {item.input} + {item.expectedOutput} + {dialogProps.data.evaluation.chatflowId?.map((_, index) => ( + + + {item.errors[index] === '' ? ( + <> +
    + {item.actualOutput[index]} +
    + + } + size='small' + label={ + item.metrics[index]?.totalCost + ? 'Total Cost: ' + item.metrics[index]?.totalCost + : 'Total Cost: N/A' + } + sx={{ mr: 1, mb: 1 }} + /> + } + label={ + item.metrics[index]?.totalTokens + ? 'Total Tokens: ' + item.metrics[index]?.totalTokens + : 'Total Tokens: N/A' + } + sx={{ mr: 1, mb: 1 }} + /> + {dialogProps.data.showTokenMetrics && ( + <> + } + label={ + item.metrics[index]?.promptTokens + ? 'Prompt Tokens: ' + + item.metrics[index]?.promptTokens + : 'Prompt Tokens: N/A' + } + sx={{ mr: 1, mb: 1 }} + />{' '} + } + label={ + item.metrics[index]?.completionTokens + ? 'Completion Tokens: ' + + item.metrics[index]?.completionTokens + : 'Completion Tokens: N/A' + } + sx={{ mr: 1, mb: 1 }} + />{' '} + + )} + {dialogProps.data.showCostMetrics && ( + <> + } + label={ + item.metrics[index]?.promptCost + ? 'Prompt Cost: ' + item.metrics[index]?.promptCost + : 'Prompt Cost: N/A' + } + sx={{ mr: 1, mb: 1 }} + />{' '} + } + label={ + item.metrics[index]?.completionCost + ? 'Completion Cost: ' + + item.metrics[index]?.completionCost + : 'Completion Cost: N/A' + } + sx={{ mr: 1, mb: 1 }} + />{' '} + + )} + } + label={ + item.metrics[index]?.apiLatency + ? 'API Latency: ' + item.metrics[index]?.apiLatency + : 'API Latency: N/A' + } + sx={{ mr: 1, mb: 1 }} + />{' '} + {dialogProps.data.showLatencyMetrics && ( + <> + {item.metrics[index]?.chain && ( + } + label={ + item.metrics[index]?.chain + ? 'Chain Latency: ' + item.metrics[index]?.chain + : 'Chain Latency: N/A' + } + sx={{ mr: 1, mb: 1 }} + /> + )}{' '} + {item.metrics[index]?.retriever && ( + } + size='small' + sx={{ mr: 1, mb: 1 }} + label={ + 'Retriever Latency: ' + + item.metrics[index]?.retriever + } + /> + )}{' '} + {item.metrics[index]?.tool && ( + } + size='small' + sx={{ mr: 1, mb: 1 }} + label={'Tool Latency: ' + item.metrics[index]?.tool} + /> + )}{' '} + } + size='small' + label={ + item.metrics[index]?.llm + ? 'LLM Latency: ' + item.metrics[index]?.llm + : 'LLM Latency: N/A' + } + sx={{ mr: 1, mb: 1 }} + />{' '} + + )} + + + ) : ( + + )} +
    + {dialogProps.data.customEvalsDefined && dialogProps.data.showCustomEvals && ( + + {(item.customEvals[index] || []).map((evaluator, index) => ( + + + + ))} + + )} + {dialogProps.data.evaluation?.evaluationType === 'llm' && ( + + {item.llmEvaluators[index] && ( + + {Object.entries(item.llmEvaluators[index]).map( + ([key, value], index) => ( + + {key}: {value} + + } + /> + ) + )} + + )} + + )} +
    + ))} +
    + ))} + +
    +
    +
    +
    +
    + ) : null + + return createPortal(component, portalElement) +} + +EvalsResultDialog.propTypes = { + show: PropTypes.bool, + dialogProps: PropTypes.object, + onCancel: PropTypes.func, + openDetailsDrawer: PropTypes.func +} + +export default EvalsResultDialog diff --git a/packages/ui/src/views/evaluations/EvaluationResult.jsx b/packages/ui/src/views/evaluations/EvaluationResult.jsx new file mode 100644 index 00000000000..e6d35079b4d --- /dev/null +++ b/packages/ui/src/views/evaluations/EvaluationResult.jsx @@ -0,0 +1,987 @@ +import React, { useEffect, useState } from 'react' +import { useDispatch, useSelector } from 'react-redux' +import { useNavigate } from 'react-router-dom' + +// material-ui +import { + TableContainer, + Table, + TableHead, + TableBody, + Divider, + Chip, + Paper, + Stack, + ButtonGroup, + Button, + Grid, + ListItem, + Box, + IconButton, + TableRow, + Skeleton, + TableCell +} from '@mui/material' +import { useTheme } from '@mui/material/styles' +import moment from 'moment' +import PaidIcon from '@mui/icons-material/Paid' +import LLMIcon from '@mui/icons-material/ModelTraining' +import AlarmIcon from '@mui/icons-material/AlarmOn' +import TokensIcon from '@mui/icons-material/AutoAwesomeMotion' + +// project imports +import MainCard from '@/ui-component/cards/MainCard' +import ViewHeader from '@/layout/MainLayout/ViewHeader' +import MetricsItemCard from '@/views/evaluations/MetricsItemCard' +import { ChartLatency } from '@/views/evaluations/ChartLatency' +import { ChartPassPrnt } from '@/views/evaluations/ChartPassPrnt' +import { ChartTokens } from '@/views/evaluations/ChartTokens' +import EvaluationResultSideDrawer from '@/views/evaluations/EvaluationResultSideDrawer' +import ErrorBoundary from '@/ErrorBoundary' +import { StyledTableCell, StyledTableRow } from '@/ui-component/table/TableStyles' +import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' +import EvaluationResultVersionsSideDrawer from '@/views/evaluations/EvaluationResultVersionsSideDrawer' +import EvalsResultDialog from '@/views/evaluations/EvalsResultDialog' +import { PermissionButton } from '@/ui-component/button/RBACButtons' + +// API +import useNotifier from '@/utils/useNotifier' +import useApi from '@/hooks/useApi' +import evaluationApi from '@/api/evaluations' + +// Hooks +import useConfirm from '@/hooks/useConfirm' +import { closeSnackbar as closeSnackbarAction, enqueueSnackbar as enqueueSnackbarAction } from '@/store/actions' + +// icons +import { + IconPercentage, + IconVectorBezier2, + IconMaximize, + IconClock, + IconAlertTriangle, + IconRun, + IconEye, + IconEyeOff, + IconX +} from '@tabler/icons-react' + +//const +import { useError } from '@/store/context/ErrorContext' + +// ==============================|| EvaluationResults ||============================== // + +const EvalEvaluationRows = () => { + const navigate = useNavigate() + const theme = useTheme() + const customization = useSelector((state) => state.customization) + const { confirm } = useConfirm() + const dispatch = useDispatch() + useNotifier() + const { error } = useError() + + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const [rows, setRows] = useState([]) + const [selectedEvaluationName, setSelectedEvaluationName] = useState('') + const [evaluation, setEvaluation] = useState({}) + + const [showCostMetrics, setShowCostMetrics] = useState(false) + const [showLatencyMetrics, setShowLatencyMetrics] = 
useState(false) + const [showTokenMetrics, setShowTokenMetrics] = useState(false) + const [showCustomEvals, setShowCustomEvals] = useState(false) + const [showCharts, setShowCharts] = useState(true) + + const [latencyChartData, setLatencyChartData] = useState([]) + const [tokensChartData, setTokensChartData] = useState([]) + const [passPrntChartData, setPassPcntChartData] = useState([]) + const [avgTokensUsed, setAvgTokensUsed] = useState() + + const [showSideDrawer, setShowSideDrawer] = useState(false) + const [sideDrawerDialogProps, setSideDrawerDialogProps] = useState({}) + + const [showVersionSideDrawer, setShowVersionSideDrawer] = useState(false) + const [versionDrawerDialogProps, setVersionDrawerDialogProps] = useState({}) + + const [outdated, setOutdated] = useState(null) + + const getEvaluation = useApi(evaluationApi.getEvaluation) + const getIsOutdatedApi = useApi(evaluationApi.getIsOutdated) + const runAgainApi = useApi(evaluationApi.runAgain) + + const [customEvalsDefined, setCustomEvalsDefined] = useState(false) + + const [showExpandTableDialog, setShowExpandTableDialog] = useState(false) + const [expandTableProps, setExpandTableProps] = useState({}) + const [isTableLoading, setTableLoading] = useState(false) + + const openDetailsDrawer = (item) => { + setSideDrawerDialogProps({ + type: 'View', + data: item, + evaluationType: evaluation.evaluationType, + evaluationChatflows: evaluation.chatflowName + }) + setShowSideDrawer(true) + } + + const closeDetailsDrawer = () => { + setShowSideDrawer(false) + } + + const openVersionsDrawer = () => { + setVersionDrawerDialogProps({ + id: evaluation?.id + }) + setShowVersionSideDrawer(true) + } + + const closeVersionsDrawer = () => { + setShowVersionSideDrawer(false) + } + + const handleShowChartsChange = () => { + setShowCharts(!showCharts) + } + + const handleShowTokenChange = () => { + setShowTokenMetrics(!showTokenMetrics) + } + + const handleLatencyMetricsChange = () => { + setShowLatencyMetrics(!showLatencyMetrics) + } + + const handleCustomEvalsChange = () => { + setShowCustomEvals(!showCustomEvals) + } + const handleDisplayCostChange = () => { + setShowCostMetrics(!showCostMetrics) + } + + const openTableDialog = () => { + setExpandTableProps({ + data: { + evaluation, + rows, + customEvalsDefined, + showCustomEvals, + showTokenMetrics, + showLatencyMetrics, + showCostMetrics + } + }) + setShowExpandTableDialog(true) + } + + const runAgain = async () => { + const confirmPayload = { + title: `Run Again`, + description: `Initiate Rerun for Evaluation ${evaluation.name}?`, + confirmButtonName: 'Yes', + cancelButtonName: 'No' + } + const isConfirmed = await confirm(confirmPayload) + + if (isConfirmed) { + runAgainApi.request(evaluation?.id) + enqueueSnackbar({ + message: "Evaluation '" + evaluation.name + "' is running. Redirecting to evaluations page.", + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + navigate(`/evaluations`) + } + } + + const URLpath = document.location.pathname.toString().split('/') + const evalId = URLpath[URLpath.length - 1] === 'evaluation_rows' ? 
'' : URLpath[URLpath.length - 1] + + const goBack = () => { + navigate(`/evaluations`) + } + + const getColSpan = (evaluationsShown, llmEvaluations) => { + let colSpan = 1 + if (evaluationsShown) colSpan++ + if (llmEvaluations) colSpan++ + return colSpan + } + + useEffect(() => { + getEvaluation.request(evalId) + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []) + + useEffect(() => { + setTableLoading(getEvaluation.loading) + }, [getEvaluation.loading]) + + useEffect(() => { + if (getIsOutdatedApi.data) { + if (getIsOutdatedApi.data.isOutdated) { + setOutdated(getIsOutdatedApi.data) + } + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getIsOutdatedApi.data]) + + useEffect(() => { + if (getEvaluation.data) { + const data = getEvaluation.data + setSelectedEvaluationName(data.name) + getIsOutdatedApi.request(data.id) + data.chatflowId = typeof data.chatflowId === 'object' ? data.chatflowId : JSON.parse(data.chatflowId) + data.chatflowName = typeof data.chatflowName === 'object' ? data.chatflowName : JSON.parse(data.chatflowName) + const rows = getEvaluation.data.rows + const latencyChartData = [] + const tokensChartData = [] + let totalTokens = 0 + for (let i = 0; i < rows.length; i++) { + rows[i].metrics = typeof rows[i].metrics === 'object' ? rows[i].metrics : JSON.parse(rows[i].metrics) + rows[i].actualOutput = typeof rows[i].actualOutput === 'object' ? rows[i].actualOutput : JSON.parse(rows[i].actualOutput) + rows[i].customEvals = typeof rows[i].evaluators === 'object' ? rows[i].evaluators : JSON.parse(rows[i].evaluators || []) + const latencyObj = { + y: i + 1 + } + const tokensObj = { + y: i + 1 + } + for (let m = 0; m < rows[i].metrics.length; m++) { + if (rows[i].metrics[m]?.apiLatency > 0) { + latencyObj[data.chatflowName[m]] = parseFloat(rows[i].metrics[m]?.apiLatency, 10) + } + if (rows[i].metrics[m]?.totalTokens) { + totalTokens += rows[i].metrics[m]?.totalTokens + tokensObj[data.chatflowName[m] + ' Prompt'] = rows[i].metrics[m]?.promptTokens + tokensObj[data.chatflowName[m] + ' Completion'] = rows[i].metrics[m]?.completionTokens + } + } + latencyChartData.push(latencyObj) + tokensChartData.push(tokensObj) + if (rows[i].llmEvaluators) { + rows[i].llmEvaluators = + typeof rows[i].llmEvaluators === 'object' ? rows[i].llmEvaluators : JSON.parse(rows[i].llmEvaluators || []) + } + if ( + rows[i].errors && + typeof rows[i].errors === 'string' && + rows[i].errors.startsWith('[') && + rows[i].errors.endsWith(']') + ) { + rows[i].errors = JSON.parse(rows[i].errors) || [] + } + } + setRows(rows) + setLatencyChartData(latencyChartData) + setTokensChartData(tokensChartData) + const evaluation = data + evaluation.average_metrics = + typeof evaluation.average_metrics === 'object' ? 
evaluation.average_metrics : JSON.parse(evaluation.average_metrics) + const passPntData = [] + setCustomEvalsDefined(data?.average_metrics?.passPcnt >= 0) + setShowCustomEvals(data?.average_metrics?.passPcnt >= 0) + if (data?.average_metrics?.passCount >= 0) { + passPntData.push({ + name: 'Pass', + value: data.average_metrics.passCount + }) + } + if (data?.average_metrics?.failCount >= 0) { + passPntData.push({ + name: 'Fail', + value: data.average_metrics.failCount + }) + } + if (data?.average_metrics?.errorCount >= 0) { + passPntData.push({ + name: 'Error', + value: data.average_metrics.errorCount + }) + } + setPassPcntChartData(passPntData) + setAvgTokensUsed((totalTokens / rows.length).toFixed(2)) + setEvaluation(evaluation) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getEvaluation.data]) + + return ( + <> + + {error ? ( + + ) : ( + + + {evaluation?.versionCount > 1 && ( + + )} + {evaluation?.versionCount > 1 && ( + + )} + } + variant='contained' + color='primary' + disabled={outdated?.errors?.length > 0} + onClick={runAgain} + > + Re-run Evaluation + + + + + {outdated && ( +
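For reference, the data-loading effect above repeatedly guards fields such as metrics, actualOutput and average_metrics with `typeof value === 'object' ? value : JSON.parse(value)`, because they may arrive either as already-parsed objects or as JSON strings. A minimal sketch of that pattern as a helper (the helper name is hypothetical and not part of this diff):

// Hypothetical helper illustrating the defensive-parse pattern used in the effect above.
const parseIfString = (value, fallback = []) => {
    if (value === null || value === undefined) return fallback
    if (typeof value === 'object') return value
    try {
        return JSON.parse(value)
    } catch (e) {
        return fallback
    }
}

// e.g. rows[i].metrics = parseIfString(rows[i].metrics)
//      evaluation.average_metrics = parseIfString(evaluation.average_metrics, {})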
    + + + + + + {outdated?.errors?.length > 0 && ( + This evaluation cannot be re-run, due to the following errors + )} + {outdated?.errors?.length === 0 && ( + The following items are outdated, re-run the evaluation for the latest results. + )} + + {outdated.dataset && outdated?.errors?.length === 0 && ( + <> +
    + Dataset: + navigate(`/dataset_rows/${outdated.dataset.id}`)} + > + + )} + {outdated.chatflows && outdated?.errors?.length === 0 && outdated.chatflows.length > 0 && ( + <> +
    + Chatflows: + + {outdated.chatflows.map((chatflow, index) => ( + navigate(`/canvas/${chatflow.chatflowId}`)} + > + ))} + + + )} + {outdated.errors.length > 0 && + outdated.errors.map((error, index) => {error})} + setOutdated(null)} + > + + +
    +
    + )} + + + {customEvalsDefined && ( + + )} + + + + + {showCharts && ( + + {customEvalsDefined && ( + + + }} + component={} + /> + + )} + {avgTokensUsed !== undefined && !isNaN(avgTokensUsed) && ( + + + }} + component={ + + } + /> + + )} + {evaluation.average_metrics?.averageLatency !== undefined && ( + + + }} + component={ + + } + /> + + )} + + )} + + +
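To illustrate the data behind the pass-percentage chart above: the loading effect pushes one entry per pass/fail/error count taken from average_metrics, so with hypothetical counts the chart input would look like this (counts are made up):

// Hypothetical shape of passPrntChartData, assuming
// average_metrics = { passCount: 8, failCount: 1, errorCount: 1 }
const passPrntChartData = [
    { name: 'Pass', value: 8 },
    { name: 'Fail', value: 1 },
    { name: 'Error', value: 1 }
]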
    + + Chatflows Used: +
    + {(evaluation.chatflowName || []).map((chatflowUsed, index) => ( + navigate('/canvas/' + evaluation.chatflowId[index])} + > + ))} +
    + +
    + + + + +   + Input + Expected Output + {evaluation.chatflowId?.map((chatflowId, index) => ( + + + {evaluation.chatflowName[index]} + {rows.length > 0 && rows[0].metrics[index].model && ( + } + color={'info'} + size='small' + label={ + rows[0].metrics[index].model + + (rows[0].metrics[index].provider + ? ' [' + rows[0].metrics[index].provider + ']' + : '') + } + sx={{ ml: 2 }} + /> + )} + + + ))} + + + {evaluation.chatflowId?.map((chatflowId, index) => ( + + + Actual Output + + {customEvalsDefined && showCustomEvals && Evaluator} + {evaluation?.evaluationType === 'llm' && LLM Evaluation} + + ))} + + + + {isTableLoading ? ( + <> + + + + + + + + + + + + + + + + + + + + + + + + ) : ( + <> + {rows.length > 0 && + rows.map((item, index) => ( + openDetailsDrawer(item)} + hover + key={index} + sx={{ cursor: 'pointer', '&:last-child td, &:last-child th': { border: 0 } }} + > + {index + 1} + {item.input} + {item.expectedOutput} + {evaluation.chatflowId?.map((_, index) => ( + + + {item.errors[index] === '' ? ( + <> +
    + {item.actualOutput[index]} +
    + + } + size='small' + label={ + item.metrics[index]?.totalCost + ? 'Total Cost: ' + + item.metrics[index]?.totalCost + : 'Total Cost: N/A' + } + sx={{ mr: 1, mb: 1 }} + /> + } + label={ + item.metrics[index]?.totalTokens + ? 'Total Tokens: ' + + item.metrics[index]?.totalTokens + : 'Total Tokens: N/A' + } + sx={{ mr: 1, mb: 1 }} + /> + {showTokenMetrics && ( + <> + } + label={ + item.metrics[index]?.promptTokens + ? 'Prompt Tokens: ' + + item.metrics[index]?.promptTokens + : 'Prompt Tokens: N/A' + } + sx={{ mr: 1, mb: 1 }} + />{' '} + } + label={ + item.metrics[index]?.completionTokens + ? 'Completion Tokens: ' + + item.metrics[index]?.completionTokens + : 'Completion Tokens: N/A' + } + sx={{ mr: 1, mb: 1 }} + />{' '} + + )} + {showCostMetrics && ( + <> + } + label={ + item.metrics[index]?.promptCost + ? 'Prompt Cost: ' + + item.metrics[index]?.promptCost + : 'Prompt Cost: N/A' + } + sx={{ mr: 1, mb: 1 }} + />{' '} + } + label={ + item.metrics[index]?.completionCost + ? 'Completion Cost: ' + + item.metrics[index]?.completionCost + : 'Completion Cost: N/A' + } + sx={{ mr: 1, mb: 1 }} + />{' '} + + )} + } + label={ + item.metrics[index]?.apiLatency + ? 'API Latency: ' + + item.metrics[index]?.apiLatency + : 'API Latency: N/A' + } + sx={{ mr: 1, mb: 1 }} + />{' '} + {showLatencyMetrics && ( + <> + {item.metrics[index]?.chain && ( + } + label={ + item.metrics[index]?.chain + ? 'Chain Latency: ' + + item.metrics[index]?.chain + : 'Chain Latency: N/A' + } + sx={{ mr: 1, mb: 1 }} + /> + )}{' '} + {item.metrics[index]?.retriever && ( + } + size='small' + sx={{ mr: 1, mb: 1 }} + label={ + 'Retriever Latency: ' + + item.metrics[index]?.retriever + } + /> + )}{' '} + {item.metrics[index]?.tool && ( + } + size='small' + sx={{ mr: 1, mb: 1 }} + label={ + 'Tool Latency: ' + + item.metrics[index]?.tool + } + /> + )}{' '} + } + size='small' + label={ + item.metrics[index]?.llm + ? 'LLM Latency: ' + + item.metrics[index]?.llm + : 'LLM Latency: N/A' + } + sx={{ mr: 1, mb: 1 }} + />{' '} + + )} + + + ) : ( + + )} +
    + {customEvalsDefined && showCustomEvals && ( + + {(item.customEvals[index] || []).map((evaluator, index) => ( + + + + ))} + + )} + {evaluation?.evaluationType === 'llm' && ( + + {item.llmEvaluators[index] && ( + + {Object.entries(item.llmEvaluators[index]).map( + ([key, value], index) => ( + + {key.toUpperCase()}: {value} + + } + /> + ) + )} + + )} + + )} +
    + ))} +
    + ))} + + )} +
    +
    +
    + {showSideDrawer && ( + + )} + {showVersionSideDrawer && ( + { + setShowVersionSideDrawer(false) + navigate(`/evaluation_results/${versionId}`) + navigate(0) + }} + /> + )} +
    + )} +
    + + setShowExpandTableDialog(false)} + openDetailsDrawer={(item) => { + openDetailsDrawer(item) + }} + /> + + ) +} + +export default EvalEvaluationRows diff --git a/packages/ui/src/views/evaluations/EvaluationResultSideDrawer.jsx b/packages/ui/src/views/evaluations/EvaluationResultSideDrawer.jsx new file mode 100644 index 00000000000..9a3ada22be9 --- /dev/null +++ b/packages/ui/src/views/evaluations/EvaluationResultSideDrawer.jsx @@ -0,0 +1,332 @@ +import PropTypes from 'prop-types' +import { CardContent, Card, Box, SwipeableDrawer, Stack, Button, Chip, Divider, Typography } from '@mui/material' +import { useSelector } from 'react-redux' +import { IconSquareRoundedChevronsRight } from '@tabler/icons-react' +import { evaluators as evaluatorsOptions, numericOperators } from '../evaluators/evaluatorConstant' + +const EvaluationResultSideDrawer = ({ show, dialogProps, onClickFunction }) => { + const onOpen = () => {} + const customization = useSelector((state) => state.customization) + + const getEvaluatorValue = (evaluator) => { + if (evaluator.type === 'text') { + return '"' + evaluator.value + '"' + } else if (evaluator.name === 'json') { + return '' + } else if (evaluator.type === 'numeric') { + return evaluator.value + } + return '' + } + + return ( + onClickFunction()} onOpen={onOpen}> + + + + + Evaluation Id + + {dialogProps.data.evaluationId} + + +
    + + + +
    + + Input + + {dialogProps.data.input} +
    + +
    + + + +
    + + Expected Output + + {dialogProps.data.expectedOutput} +
    + + {dialogProps.data && + dialogProps.data.actualOutput?.length > 0 && + dialogProps.data.actualOutput.map((output, index) => ( + + + {dialogProps.evaluationChatflows?.length > 0 && ( + <> + + + Chatflow + + {dialogProps.evaluationChatflows[index]} + +
    + + + )} + +
    + + {dialogProps.data.errors[index] === '' ? 'Actual Output' : 'Error'} + + + {dialogProps.data.errors[index] === '' ? ( + dialogProps.data.actualOutput[index] + ) : ( + + )} + +
    +
    + + +
    + + Latency Metrics + + + + + {dialogProps.data.metrics[index]?.chain && ( + + )} + {dialogProps.data.metrics[index]?.retriever && ( + + )} + {dialogProps.data.metrics[index]?.tool && ( + + )} + + + +
    +
    + +
    + + + Tokens + + + + + + + + + +
    + + + Cost + + + + + + + + + +
    + +
    + {dialogProps.data?.customEvals && + dialogProps.data?.customEvals[index] && + dialogProps.data.customEvals[index].length > 0 && ( + + + Custom Evaluators + + + {dialogProps.data.customEvals[index] && + dialogProps.data.customEvals[index].map((evaluator, index) => ( + + + + opt.name === evaluator.measure + )?.label || 'Actual Output' + } ${ + [...evaluatorsOptions, ...numericOperators] + .find((opt) => opt.name === evaluator.operator) + ?.label.toLowerCase() || '' + } ${getEvaluatorValue(evaluator)}`} + > + + ))} + + + )} + {dialogProps?.evaluationType === 'llm' && ( + <> +
    + + +
    + + LLM Graded + + + {Object.entries(dialogProps.data.llmEvaluators[index]).map(([key, value], index) => ( + + {key}: {value} + + } + /> + ))} + +
    + + )} +
    +
    + ))} +
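For reference, the custom-evaluator chip labels in the drawer above are built by looking the evaluator's measure and operator up in the shared evaluator constants; a small worked example with a hypothetical stored evaluator result:

// evaluatorsOptions and numericOperators come from evaluatorConstant.js, imported at the top of this file.
// Hypothetical evaluator result as stored on a row, e.g. "Total Tokens less than 500".
const evaluator = { measure: 'totalTokens', operator: 'lessThan', type: 'numeric', value: 500 }

const options = [...evaluatorsOptions, ...numericOperators]
const label = `${options.find((opt) => opt.name === evaluator.measure)?.label || 'Actual Output'} ${
    options.find((opt) => opt.name === evaluator.operator)?.label.toLowerCase() || ''
} ${evaluator.value}`
// => 'Total Tokens less than 500'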
    +
    + ) +} + +EvaluationResultSideDrawer.propTypes = { + show: PropTypes.bool, + dialogProps: PropTypes.object, + onClickFunction: PropTypes.func +} + +export default EvaluationResultSideDrawer diff --git a/packages/ui/src/views/evaluations/EvaluationResultVersionsSideDrawer.jsx b/packages/ui/src/views/evaluations/EvaluationResultVersionsSideDrawer.jsx new file mode 100644 index 00000000000..91f5ad7c894 --- /dev/null +++ b/packages/ui/src/views/evaluations/EvaluationResultVersionsSideDrawer.jsx @@ -0,0 +1,84 @@ +import { useEffect, useState } from 'react' +import PropTypes from 'prop-types' +import moment from 'moment/moment' + +import { Button, Box, SwipeableDrawer } from '@mui/material' +import { IconSquareRoundedChevronsRight } from '@tabler/icons-react' +import { + Timeline, + TimelineConnector, + TimelineContent, + TimelineDot, + TimelineItem, + TimelineOppositeContent, + timelineOppositeContentClasses, + TimelineSeparator +} from '@mui/lab' + +import evaluationApi from '@/api/evaluations' +import useApi from '@/hooks/useApi' + +const EvaluationResultVersionsSideDrawer = ({ show, dialogProps, onClickFunction, onSelectVersion }) => { + const onOpen = () => {} + const [versions, setVersions] = useState([]) + + const getVersionsApi = useApi(evaluationApi.getVersions) + + useEffect(() => { + getVersionsApi.request(dialogProps.id) + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [dialogProps]) + + useEffect(() => { + if (getVersionsApi.data) { + setVersions(getVersionsApi.data.versions) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getVersionsApi.data]) + + const navigateToEvaluationResult = (id) => { + onSelectVersion(id) + } + + return ( + onClickFunction()} onOpen={onOpen}> + + + + {versions && + versions.map((version, index) => ( + + + {moment(version.runDate).format('DD-MMM-YYYY, hh:mm:ss A')} + + + + {index !== versions.length - 1 && } + + + + + + ))} + + + + ) +} + +EvaluationResultVersionsSideDrawer.propTypes = { + show: PropTypes.bool, + dialogProps: PropTypes.object, + onClickFunction: PropTypes.func, + onSelectVersion: PropTypes.func +} + +export default EvaluationResultVersionsSideDrawer diff --git a/packages/ui/src/views/evaluations/MetricsItemCard.jsx b/packages/ui/src/views/evaluations/MetricsItemCard.jsx new file mode 100644 index 00000000000..a8bdca820ef --- /dev/null +++ b/packages/ui/src/views/evaluations/MetricsItemCard.jsx @@ -0,0 +1,58 @@ +import PropTypes from 'prop-types' + +// material-ui +import { styled } from '@mui/material/styles' +import { Box, Grid, Typography } from '@mui/material' + +// project imports +import MainCard from '@/ui-component/cards/MainCard' +import SkeletonChatflowCard from '@/ui-component/cards/Skeleton/ChatflowCard' + +const CardWrapper = styled(MainCard)(({ theme }) => ({ + background: theme.palette.card.main, + color: theme.darkTextPrimary, + overflow: 'auto', + position: 'relative', + boxShadow: '0 2px 14px 0 rgb(32 40 45 / 8%)', + cursor: 'pointer', + '&:hover': { + background: theme.palette.card.hover, + boxShadow: '0 2px 14px 0 rgb(32 40 45 / 20%)' + }, + overflowWrap: 'break-word', + whiteSpace: 'pre-line' +})) + +const MetricsItemCard = ({ isLoading, data, component }) => { + return ( + <> + {isLoading ? 
( + + ) : ( + + + + + + {data.icon} + + {data.header} + + + + + + {component} + + )} + + ) +} + +MetricsItemCard.propTypes = { + isLoading: PropTypes.bool, + data: PropTypes.object, + component: PropTypes.element +} + +export default MetricsItemCard diff --git a/packages/ui/src/views/evaluations/index.jsx b/packages/ui/src/views/evaluations/index.jsx new file mode 100644 index 00000000000..2f28e1b1300 --- /dev/null +++ b/packages/ui/src/views/evaluations/index.jsx @@ -0,0 +1,819 @@ +import React, { useEffect, useState } from 'react' +import * as PropTypes from 'prop-types' +import moment from 'moment/moment' +import { useNavigate } from 'react-router-dom' +import { useDispatch, useSelector } from 'react-redux' + +// material-ui +import { + Checkbox, + Skeleton, + TableCell, + Box, + Button, + Chip, + Collapse, + IconButton, + Paper, + Stack, + Table, + TableBody, + TableContainer, + TableHead, + TableRow +} from '@mui/material' +import { useTheme } from '@mui/material/styles' +import { closeSnackbar as closeSnackbarAction, enqueueSnackbar as enqueueSnackbarAction } from '@/store/actions' + +// API +import evaluationApi from '@/api/evaluations' +import useApi from '@/hooks/useApi' + +// Hooks +import useConfirm from '@/hooks/useConfirm' +import useNotifier from '@/utils/useNotifier' + +// project +import MainCard from '@/ui-component/cards/MainCard' +import { StyledButton } from '@/ui-component/button/StyledButton' +import { BackdropLoader } from '@/ui-component/loading/BackdropLoader' +import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' +import ErrorBoundary from '@/ErrorBoundary' +import ViewHeader from '@/layout/MainLayout/ViewHeader' +import { StyledTableCell, StyledTableRow } from '@/ui-component/table/TableStyles' +import CreateEvaluationDialog from '@/views/evaluations/CreateEvaluationDialog' +import { StyledPermissionButton } from '@/ui-component/button/RBACButtons' + +// icons +import { + IconChartHistogram, + IconPlus, + IconChartBar, + IconRefresh, + IconTrash, + IconX, + IconChevronsUp, + IconChevronsDown +} from '@tabler/icons-react' +import empty_evalSVG from '@/assets/images/empty_evals.svg' + +import { useError } from '@/store/context/ErrorContext' + +const EvalsEvaluation = () => { + const theme = useTheme() + const customization = useSelector((state) => state.customization) + const { confirm } = useConfirm() + const dispatch = useDispatch() + useNotifier() + const { error } = useError() + + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const createNewEvaluation = useApi(evaluationApi.createEvaluation) + const getAllEvaluations = useApi(evaluationApi.getAllEvaluations) + + const [showNewEvaluationDialog, setShowNewEvaluationDialog] = useState(false) + const [dialogProps, setDialogProps] = useState({}) + const [rows, setRows] = useState([]) + const [loading, setLoading] = useState(false) + const [isTableLoading, setTableLoading] = useState(false) + const [selected, setSelected] = useState([]) + + const onSelectAllClick = (event) => { + if (event.target.checked) { + const newSelected = rows.filter((item) => item?.latestEval).map((n) => n.id) + setSelected(newSelected) + return + } + setSelected([]) + } + + const handleSelect = (event, id) => { + const selectedIndex = selected.indexOf(id) + let newSelected = [] + + if (selectedIndex === -1) { + newSelected = newSelected.concat(selected, id) + } else if (selectedIndex === 0) { + newSelected = 
newSelected.concat(selected.slice(1)) + } else if (selectedIndex === selected.length - 1) { + newSelected = newSelected.concat(selected.slice(0, -1)) + } else if (selectedIndex > 0) { + newSelected = newSelected.concat(selected.slice(0, selectedIndex), selected.slice(selectedIndex + 1)) + } + setSelected(newSelected) + } + + const createEvaluation = () => { + const dialogProp = { + type: 'ADD', + cancelButtonName: 'Cancel', + confirmButtonName: 'Start New Evaluation', + data: {} + } + setDialogProps(dialogProp) + setShowNewEvaluationDialog(true) + } + + const deleteEvaluationsAllVersions = async () => { + const confirmPayload = { + title: `Delete`, + description: `Delete ${selected.length} ${ + selected.length > 1 ? 'evaluations' : 'evaluation' + }? This will delete all versions of the evaluation.`, + confirmButtonName: 'Delete', + cancelButtonName: 'Cancel' + } + const isConfirmed = await confirm(confirmPayload) + + if (isConfirmed) { + try { + const isDeleteAllVersion = true + const deleteResp = await evaluationApi.deleteEvaluations(selected, isDeleteAllVersion) + if (deleteResp.data) { + enqueueSnackbar({ + message: `${selected.length} ${selected.length > 1 ? 'evaluations' : 'evaluation'} deleted`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + onRefresh() + } + } catch (error) { + enqueueSnackbar({ + message: `Failed to delete ${selected.length > 1 ? 'evaluations' : 'evaluation'}: ${ + typeof error.response.data === 'object' ? error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } + setSelected([]) + } + } + + useEffect(() => { + getAllEvaluations.request() + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []) + + useEffect(() => { + if (getAllEvaluations.data) { + const evalRows = getAllEvaluations.data + if (evalRows) { + // Prepare the data for the table + for (let i = 0; i < evalRows.length; i++) { + const evalRow = evalRows[i] + evalRows[i].runDate = moment(evalRow.runDate).format('DD-MMM-YYYY, hh:mm:ss A') + evalRows[i].average_metrics = + typeof evalRow.average_metrics === 'object' ? evalRow.average_metrics : JSON.parse(evalRow.average_metrics) + evalRows[i].usedFlows = + typeof evalRow.chatflowName === 'object' ? evalRow.chatflowName : JSON.parse(evalRow.chatflowName) + evalRows[i].chatIds = typeof evalRow.chatflowId === 'object' ? evalRow.chatflowId : JSON.parse(evalRow.chatflowId) + } + setRows(evalRows) + } + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getAllEvaluations.data]) + + useEffect(() => { + if (createNewEvaluation.data) { + const evalRows = createNewEvaluation.data + for (let i = 0; i < evalRows.length; i++) { + const evalRow = evalRows[i] + evalRows[i].runDate = moment(evalRow.runDate).format('DD-MMM-YYYY, hh:mm:ss A') + evalRows[i].average_metrics = + typeof evalRow.average_metrics === 'object' ? evalRow.average_metrics : JSON.parse(evalRow.average_metrics) + evalRows[i].usedFlows = typeof evalRow.chatflowName === 'object' ? evalRow.chatflowName : JSON.parse(evalRow.chatflowName) + evalRows[i].chatIds = typeof evalRow.chatflowId === 'object' ? 
evalRow.chatflowId : JSON.parse(evalRow.chatflowId) + } + setRows(evalRows) + } + setLoading(false) + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [createNewEvaluation.data]) + + const onConfirm = (evaluationData) => { + setShowNewEvaluationDialog(false) + setLoading(true) + createNewEvaluation.request(evaluationData) + } + + useEffect(() => { + if (createNewEvaluation.error) { + // Change to Notifstack + enqueueSnackbar({ + message: `Failed to create new evaluation: ${ + typeof createNewEvaluation.error.response?.data === 'object' + ? createNewEvaluation.error.response.data.message + : createNewEvaluation.error.response?.data || createNewEvaluation.error.message || 'Unknown error' + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + setLoading(false) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [createNewEvaluation.error]) + + const onRefresh = () => { + getAllEvaluations.request() + } + + useEffect(() => { + setTableLoading(getAllEvaluations.loading) + }, [getAllEvaluations.loading]) + + return ( + <> + + {error ? ( + + ) : ( + + + } + > + Refresh + + } + > + New Evaluation + + + {selected.length > 0 && ( + } + > + Delete {selected.length} {selected.length === 1 ? 'evaluation' : 'evaluations'} + + )} + {!isTableLoading && rows.length <= 0 ? ( + + + empty_evalSVG + +
    No Evaluations Yet
    +
    + ) : ( + + + + + + item?.latestEval) || []).length} + onChange={onSelectAllClick} + inputProps={{ + 'aria-label': 'select all' + }} + /> + + + Name + Latest Version + Average Metrics + Last Evaluated + Chatflow(s) + Dataset + + + + + {isTableLoading ? ( + <> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ) : ( + <> + {rows + .filter((item) => item?.latestEval) + .map((item, index) => ( + row.name === item.name)} + item={item} + key={index} + theme={theme} + selected={selected} + customization={customization} + onRefresh={onRefresh} + handleSelect={handleSelect} + /> + ))} + + )} + +
    +
    + )} +
    + )} +
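The handleSelect handler above (and handleSelectChild further down in EvaluationRunRow) toggles an id in the selection array using the indexOf/slice pattern; an equivalent, more compact alternative (a sketch only, not what this diff uses) would be:

// Alternative sketch of the same toggle behaviour as handleSelect/handleSelectChild.
const toggleSelection = (selected, id) =>
    selected.includes(id) ? selected.filter((existingId) => existingId !== id) : [...selected, id]

// usage: setSelected((prev) => toggleSelection(prev, item.id))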
    + {showNewEvaluationDialog && ( + setShowNewEvaluationDialog(false)} + onConfirm={onConfirm} + > + )} + + {loading && } + + ) +} + +function EvaluationRunRow(props) { + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const [open, setOpen] = useState(false) + const [childSelected, setChildSelected] = useState([]) + + const theme = useTheme() + const navigate = useNavigate() + const { confirm } = useConfirm() + const dispatch = useDispatch() + + const showResults = (item) => { + navigate(`/evaluation_results/${item.id}`) + } + + const goToDataset = (id) => { + navigate(`/dataset_rows/${id}`) + } + + const onSelectAllChildClick = (event) => { + if (event.target.checked) { + const newSelected = (props?.rows || []).map((n) => n.id) + setChildSelected(newSelected) + return + } + setChildSelected([]) + } + + const handleSelectChild = (event, id) => { + const selectedIndex = childSelected.indexOf(id) + let newSelected = [] + + if (selectedIndex === -1) { + newSelected = newSelected.concat(childSelected, id) + } else if (selectedIndex === 0) { + newSelected = newSelected.concat(childSelected.slice(1)) + } else if (selectedIndex === childSelected.length - 1) { + newSelected = newSelected.concat(childSelected.slice(0, -1)) + } else if (selectedIndex > 0) { + newSelected = newSelected.concat(childSelected.slice(0, selectedIndex), childSelected.slice(selectedIndex + 1)) + } + setChildSelected(newSelected) + } + + const deleteChildEvaluations = async () => { + const confirmPayload = { + title: `Delete`, + description: `Delete ${childSelected.length} ${childSelected.length > 1 ? 'evaluations' : 'evaluation'}?`, + confirmButtonName: 'Delete', + cancelButtonName: 'Cancel' + } + const isConfirmed = await confirm(confirmPayload) + + if (isConfirmed) { + try { + const deleteResp = await evaluationApi.deleteEvaluations(childSelected) + if (deleteResp.data) { + enqueueSnackbar({ + message: `${childSelected.length} evaluations deleted.`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + props.onRefresh() + } + } catch (error) { + enqueueSnackbar({ + message: `Failed to delete Evaluation: ${ + typeof error.response.data === 'object' ? error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } + } + } + + const goToCanvas = (id) => { + navigate(`/canvas/${id}`) + } + + const getStatusColor = (status) => { + switch (status) { + case 'pending': + return '#ffc107' + case 'completed': + return '#52b69a' + case 'error': + return '#f44336' + default: + return '#bcbcbc' + } + } + + const getPassRateColor = (passPcnt) => { + if (passPcnt > 90) { + return '#52b69a' + } else if (passPcnt >= 50) { + return '#f48c06' + } else { + return '#f44336' + } + } + + return ( + + + + props.handleSelect(event, props.item.id)} + /> + + +
    +
    + {props.item.name} + + {props.item.version}{' '} + {props.item.version > 0 && ( + setOpen(!open)}> + {props.item.version > 0 && open ? : } + + )} + + + + + {props.item.average_metrics?.averageCost && ( + + )} + + {props.item.average_metrics?.passPcnt >= 0 && ( + + )} + + + {moment(props.item.runDate).format('DD-MMM-YYYY, hh:mm:ss A')} + + + {props.item?.usedFlows?.map((usedFlow, index) => ( + goToCanvas(props.item.chatIds[index])} + > + ))} + + + + goToDataset(props.item.datasetId)} + > + + + showResults(props.item)} + > + + + +
    + {open && childSelected.length > 0 && ( + + + + + + )} + {open && ( + <> + + + + + + + + + + + Version + Last Run + Average Metrics + Status + + + + + {props.rows.length > 0 && + props.rows.map((childItem, childIndex) => ( + + + + handleSelectChild(event, childItem.id)} + /> + + {childItem.version} + + {moment(childItem.runDate).format('DD-MMM-YYYY, hh:mm:ss A')} + + + + + {childItem.average_metrics?.averageCost && ( + + )} + + {childItem.average_metrics?.passPcnt >= 0 && ( + + )} + + + + + + + showResults(childItem)} + > + + + + + + ))} + +
    +
    +
    +
    +
    + + )} +
    + ) +} +EvaluationRunRow.propTypes = { + item: PropTypes.object, + selected: PropTypes.array, + rows: PropTypes.arrayOf(PropTypes.object), + theme: PropTypes.any, + customization: PropTypes.object, + onRefresh: PropTypes.func, + handleSelect: PropTypes.func +} +export default EvalsEvaluation diff --git a/packages/ui/src/views/evaluators/AddEditEvaluatorDialog.jsx b/packages/ui/src/views/evaluators/AddEditEvaluatorDialog.jsx new file mode 100644 index 00000000000..d113b9a2975 --- /dev/null +++ b/packages/ui/src/views/evaluators/AddEditEvaluatorDialog.jsx @@ -0,0 +1,556 @@ +import { createPortal } from 'react-dom' +import PropTypes from 'prop-types' +import { useState, useEffect, useCallback, useMemo } from 'react' +import { useDispatch } from 'react-redux' +import { enqueueSnackbar as enqueueSnackbarAction, closeSnackbar as closeSnackbarAction } from '@/store/actions' +import { cloneDeep } from 'lodash' + +// Material +import { IconButton, Dialog, DialogActions, DialogContent, DialogTitle, Box, Typography, OutlinedInput, Button, Stack } from '@mui/material' +import { GridActionsCellItem } from '@mui/x-data-grid' + +// Project imports +import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' +import ExpandTextDialog from '@/ui-component/dialog/ExpandTextDialog' +import { StyledPermissionButton } from '@/ui-component/button/RBACButtons' +import { Dropdown } from '@/ui-component/dropdown/Dropdown' +import { TooltipWithParser } from '@/ui-component/tooltip/TooltipWithParser' +import { Grid } from '@/ui-component/grid/Grid' +import SamplePromptDialog from '@/views/evaluators/SamplePromptDialog' + +// Icons +import { IconBulb, IconArrowsMaximize, IconPlus, IconPuzzle, IconX, IconNotes } from '@tabler/icons-react' +import DeleteIcon from '@mui/icons-material/Delete' + +// API +import evaluatorsApi from '@/api/evaluators' + +// utils +import useNotifier from '@/utils/useNotifier' + +// const +import { HIDE_CANVAS_DIALOG, SHOW_CANVAS_DIALOG } from '@/store/actions' +import { evaluators, evaluatorTypes, numericOperators } from './evaluatorConstant' + +const AddEditEvaluatorDialog = ({ show, dialogProps, onCancel, onConfirm }) => { + const portalElement = document.getElementById('portal') + + const dispatch = useDispatch() + + // ==============================|| Snackbar ||============================== // + + useNotifier() + + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const [name, setName] = useState('') + const [evaluatorType, setEvaluatorType] = useState('') + const [availableEvaluators, setAvailableEvaluators] = useState([]) + const [selectedEvaluator, setSelectedEvaluator] = useState() + const [selectedValue, setSelectedValue] = useState('') + const [selectedMetricValue, setSelectedMetricValue] = useState('0') + const [selectedMetricOperator, setSelectedMetricOperator] = useState('equals') + + const [showExpandDialog, setShowExpandDialog] = useState(false) + const [expandDialogProps, setExpandDialogProps] = useState({}) + + const [showSamplePromptDialog, setShowSamplePromptDialog] = useState(false) + const [samplePromptDialogProps, setSamplePromptDialogProps] = useState({}) + + const [outputSchema, setOutputSchema] = useState([]) + const [prompt, setPrompt] = useState('') + + const deleteItem = useCallback( + (id) => () => { + setTimeout(() => { + setOutputSchema((prevRows) => prevRows.filter((row) => row.id !== id)) + }) + }, + [] + ) + + const onSamplePromptSelected = 
(data) => { + setPrompt(data.prompt) + setOutputSchema(data.json) + setShowSamplePromptDialog(false) + } + + const onShowPromptDialogClicked = (inputParam) => { + const dialogProps = { + value: prompt, + inputParam, + confirmButtonName: 'Save', + cancelButtonName: 'Cancel' + } + setSamplePromptDialogProps(dialogProps) + setShowSamplePromptDialog(true) + } + const onExpandDialogClicked = (inputParam) => { + const dialogProps = { + value: prompt, + inputParam, + confirmButtonName: 'Save', + cancelButtonName: 'Cancel' + } + setExpandDialogProps(dialogProps) + setShowExpandDialog(true) + } + + const onExpandDialogSave = (newValue) => { + setShowExpandDialog(false) + setPrompt(newValue) + } + + const addNewRow = () => { + setTimeout(() => { + setOutputSchema((prevRows) => { + let allRows = [...cloneDeep(prevRows)] + const lastRowId = allRows.length ? allRows[allRows.length - 1].id + 1 : 1 + allRows.push({ + id: lastRowId, + property: '', + description: '', + type: '', + required: false + }) + return allRows + }) + }) + } + + const onRowUpdate = (newRow) => { + setTimeout(() => { + setOutputSchema((prevRows) => { + let allRows = [...cloneDeep(prevRows)] + const indexToUpdate = allRows.findIndex((row) => row.id === newRow.id) + if (indexToUpdate >= 0) { + allRows[indexToUpdate] = { ...newRow } + } + return allRows + }) + }) + } + + const columns = useMemo( + () => [ + { field: 'property', headerName: 'Property', editable: true, flex: 1 }, + { + field: 'type', + headerName: 'Type', + type: 'singleSelect', + valueOptions: ['string', 'number', 'boolean'], + editable: true, + width: 120 + }, + { field: 'description', headerName: 'Description', editable: true, flex: 1 }, + { field: 'required', headerName: 'Required', type: 'boolean', editable: true, width: 80 }, + { + field: 'actions', + type: 'actions', + width: 80, + getActions: (params) => [ + } label='Delete' onClick={deleteItem(params.id)} /> + ] + } + ], + [deleteItem] + ) + + const onEvaluatorTypeChange = (type) => { + setEvaluatorType(type) + setAvailableEvaluators(evaluators.filter((item) => item.type === type)) + setSelectedEvaluator('') + setSelectedValue('') + } + + const getCaption = () => { + if (selectedEvaluator) { + // return the description of the selected evaluator + const e = availableEvaluators.find((item) => item.name === selectedEvaluator) + if (e) { + return e.description + } + } + return '' + } + + const disableButton = () => { + if (!name || !evaluatorType) { + return true + } + if (evaluatorType === 'text') { + return !selectedEvaluator || !selectedValue + } else if (evaluatorType === 'numeric') { + return !selectedEvaluator || !selectedMetricOperator || !selectedMetricValue + } else if (evaluatorType === 'llm') { + return !prompt || outputSchema.length === 0 + } + } + + const updateEvaluator = async () => { + try { + const data = prepareData() + + const updateResp = await evaluatorsApi.updateEvaluator(dialogProps.data.id, data) + if (updateResp.data) { + enqueueSnackbar({ + message: `Evaluator ${name} updated`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + onConfirm(updateResp.data.id) + } + } catch (error) { + enqueueSnackbar({ + message: `Failed to update Evaluator ${name}: ${ + typeof error.response.data === 'object' ? 
error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + onCancel() + } + } + + const prepareData = () => { + const data = { + name: name, + type: evaluatorType + } + if (evaluatorType === 'numeric') { + data.operator = selectedMetricOperator + data.value = selectedMetricValue + data.measure = selectedEvaluator + } else if (evaluatorType === 'text' || evaluatorType === 'json') { + data.operator = selectedEvaluator + data.value = selectedValue + } else if (evaluatorType === 'llm') { + data.outputSchema = outputSchema + data.prompt = prompt + } + return data + } + + const addEvaluator = async () => { + try { + const data = prepareData() + + const createResp = await evaluatorsApi.createEvaluator(data) + if (createResp.data) { + enqueueSnackbar({ + message: 'New Evaluator added', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + onConfirm(createResp.data.id) + } + } catch (error) { + enqueueSnackbar({ + message: `Failed to add new Evaluator: ${ + typeof error.response.data === 'object' ? error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + onCancel() + } + } + + useEffect(() => { + if (dialogProps.data && dialogProps.type === 'EDIT') { + const data = dialogProps.data + onEvaluatorTypeChange(data.type) + setName(data.name) + + if ('text' === data.type || 'json' === data.type) { + setSelectedEvaluator(data.operator) + setSelectedValue(data.value) + } else if ('numeric' === data.type) { + setSelectedValue(data.measure) + setSelectedMetricValue(data.value) + setSelectedMetricOperator(data.operator) + setSelectedEvaluator(data.measure) + } else if ('llm' === data.type) { + setPrompt(data.prompt) + setOutputSchema(data.outputSchema) + } + } else if (dialogProps.data && dialogProps.type === 'ADD') { + const data = dialogProps.data + onEvaluatorTypeChange(data.type) + setName(data.name) + setOutputSchema([]) + } + + return () => { + // reset all values + setName('') + setEvaluatorType('') + setAvailableEvaluators([]) + setSelectedEvaluator('') + setSelectedValue('') + setSelectedMetricValue('0') + setSelectedMetricOperator('equals') + setOutputSchema([]) + } + }, [dialogProps]) + + useEffect(() => { + if (show) dispatch({ type: SHOW_CANVAS_DIALOG }) + else dispatch({ type: HIDE_CANVAS_DIALOG }) + return () => dispatch({ type: HIDE_CANVAS_DIALOG }) + }, [show, dispatch]) + + const component = show ? ( + + +
    + + {dialogProps.type === 'ADD' ? 'Add Evaluator' : 'Edit Evaluator'} +
    +
    + + + Name + setName(e.target.value)} + value={name ?? ''} + /> + + + Evaluator Type + onEvaluatorTypeChange(newValue)} + value={evaluatorType} + /> + + {evaluatorType && evaluatorType !== 'llm' && ( + + Available Evaluators + setSelectedEvaluator(e)} + value={selectedEvaluator} + /> + + )} + {evaluatorType === 'numeric' && selectedEvaluator && ( + <> + + Select Operator + setSelectedMetricOperator(e)} + value={selectedMetricOperator} + /> + + + Value + setSelectedMetricValue(e.target.value)} + value={selectedMetricValue ?? '0'} + /> + + {getCaption()} + + + + )} + {evaluatorType === 'text' && selectedEvaluator && ( + <> + + Value + setSelectedValue(e.target.value)} + value={selectedValue} + sx={{ mb: 2 }} + /> + + {getCaption()} + + + + )} + {evaluatorType === 'llm' && ( + <> + + + + Output Schema + + + + + + + + + + +
    + Prompt +
    + {prompt && ( + + onExpandDialogClicked({ + label: 'Evaluation Prompt', + name: 'evaluationPrompt', + type: 'string' + }) + } + > + + + )} +
    + setPrompt(e.target.value)} + value={prompt} + /> +
    +
    + + + You can use {question} {actualOutput}{' '} + {expectedOutput} to inject runtime values into your prompt. + +
    +
    +
    + + )} +
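As a concrete illustration of the payloads prepareData builds for each evaluator type before addEvaluator or updateEvaluator is called (field names follow the code above; the values are invented):

// Illustrative payloads only — values are made up, field names follow prepareData().
const numericPayload = { name: 'Token budget', type: 'numeric', measure: 'totalTokens', operator: 'lessThan', value: '500' }
const textPayload = { name: 'Mentions refund', type: 'text', operator: 'ContainsAny', value: 'refund,reimbursement' }
const llmPayload = {
    name: 'Correctness grader',
    type: 'llm',
    prompt: 'Grading instructions that reference {question}, {actualOutput} and {expectedOutput}',
    outputSchema: [{ id: 1, property: 'score', description: 'graded score', type: 'number', required: true }]
}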
    + + + (dialogProps.type === 'ADD' ? addEvaluator() : updateEvaluator())} + > + {dialogProps.confirmButtonName} + + + + setShowExpandDialog(false)} + onConfirm={(newValue) => onExpandDialogSave(newValue)} + > + setShowSamplePromptDialog(false)} + onConfirm={(newValue) => onSamplePromptSelected(newValue)} + > +
    + ) : null + + return createPortal(component, portalElement) +} + +AddEditEvaluatorDialog.propTypes = { + show: PropTypes.bool, + dialogProps: PropTypes.object, + onCancel: PropTypes.func, + onConfirm: PropTypes.func +} + +export default AddEditEvaluatorDialog diff --git a/packages/ui/src/views/evaluators/SamplePromptDialog.jsx b/packages/ui/src/views/evaluators/SamplePromptDialog.jsx new file mode 100644 index 00000000000..e71c6068383 --- /dev/null +++ b/packages/ui/src/views/evaluators/SamplePromptDialog.jsx @@ -0,0 +1,173 @@ +import { createPortal } from 'react-dom' +import PropTypes from 'prop-types' +import { useState, useEffect, useMemo } from 'react' + +// Material +import { Dialog, DialogActions, DialogContent, DialogTitle, Box, Typography, Divider, Stack, OutlinedInput, Button } from '@mui/material' + +// Project imports +import { StyledButton } from '@/ui-component/button/StyledButton' +import { Dropdown } from '@/ui-component/dropdown/Dropdown' +import { TooltipWithParser } from '@/ui-component/tooltip/TooltipWithParser' +import { Grid } from '@/ui-component/grid/Grid' + +// Icons +import { IconTestPipe2 } from '@tabler/icons-react' + +// utils +import useNotifier from '@/utils/useNotifier' + +// const +import { evaluationPrompts } from '@/views/evaluators/evaluationPrompts' + +const SamplePromptDialog = ({ show, dialogProps, onCancel, onConfirm }) => { + const portalElement = document.getElementById('portal') + useNotifier() + + const [selectedPromptName, setSelectedPromptName] = useState('') + const [selectedConfig, setSelectedConfig] = useState([]) + const [selectedPromptText, setSelectedPromptText] = useState('') + + useEffect(() => { + resetData() + return () => { + resetData() + } + }, [dialogProps]) + + const resetData = () => { + setSelectedPromptName('') + setSelectedConfig([]) + setSelectedPromptText('') + } + + const onSelected = async (selectedPromptName) => { + if (selectedPromptName) { + const selected = evaluationPrompts.find((prompt) => prompt.name === selectedPromptName) + setSelectedConfig(selected.json) + setSelectedPromptText(selected.prompt) + setSelectedPromptName(selected.name) + } else { + setSelectedPromptName('') + setSelectedConfig([]) + setSelectedPromptText('') + } + } + + const onConfirmPrompt = async () => { + const selected = evaluationPrompts.find((prompt) => prompt.name === selectedPromptName) + onConfirm(selected) + } + + const disableButton = () => { + return !selectedPromptName || !selectedPromptText + } + + const columns = useMemo( + () => [ + { field: 'property', headerName: 'Property', flex: 1 }, + { + field: 'type', + headerName: 'Type', + type: 'singleSelect', + valueOptions: ['string', 'number', 'boolean'], + width: 120 + }, + { field: 'description', headerName: 'Description', flex: 1 }, + { field: 'required', headerName: 'Required', type: 'boolean', width: 80 }, + { + field: 'actions', + type: 'actions', + width: 80, + getActions: () => [] + } + ], + [] + ) + + const component = show ? ( + + +
    + + Sample Prompts +
    +
    + + + + + + Available Prompts * + + + + {selectedPromptName && ( + + + + Output Schema + + + + + + )} + {selectedPromptName && ( + +
    + Prompt +
    + setSelectedPromptText(e.target.value)} + value={selectedPromptText} + /> +
    + )} +
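When a sample prompt is chosen, onConfirmPrompt hands the whole entry from evaluationPrompts back to the parent dialog; a sketch of the object AddEditEvaluatorDialog's onSamplePromptSelected receives (shape mirrors evaluationPrompts.js further down, prompt text abbreviated):

// Shape of the selected entry passed to onConfirm(selected).
const selected = {
    name: 'correctness',
    label: 'Correctness',
    json: [{ id: 1, property: 'score', description: 'graded score', type: 'number', required: true }],
    prompt: '...grading instructions that reference {expectedOutput} and {actualOutput}...'
}
// AddEditEvaluatorDialog then calls setPrompt(selected.prompt) and setOutputSchema(selected.json).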
    +
    + + + onConfirmPrompt()} + > + {'Select Prompt'} + + +
    + ) : null + + return createPortal(component, portalElement) +} + +SamplePromptDialog.propTypes = { + show: PropTypes.bool, + dialogProps: PropTypes.object, + onCancel: PropTypes.func, + onConfirm: PropTypes.func +} + +export default SamplePromptDialog diff --git a/packages/ui/src/views/evaluators/evaluationPrompts.js b/packages/ui/src/views/evaluators/evaluationPrompts.js new file mode 100644 index 00000000000..e902638754c --- /dev/null +++ b/packages/ui/src/views/evaluators/evaluationPrompts.js @@ -0,0 +1,26 @@ +export const evaluationPrompts = [ + { + name: 'correctness', + label: 'Correctness', + json: [{ id: 1, property: 'score', description: 'graded score', type: 'number', required: true }], + prompt: `Respond with a numeric score based on how well the following response compare to the ground truth. Grade only based expected response: + +Ground Truth: {expectedOutput} + +DATA: +--------- +Response: {actualOutput} +--------- + +Do not include any other information in your response. Do not evaluate correctness to the question, only match it to the reference. It is very critical that you answer only with a numeric score. Is the assistants answer grounded in and similar to the ground truth answer? A score of 1 means that the assistant answer is not at all grounded in the ground truth answer, while a score of 5 means that the assistant answer contains some information that is grounded in and similar to the ground truth answer. A score of 10 means that the assistant answer is fully ground and similar to the ground truth answer. Please provide a score between 1 and 10. Do not generate any newlines in the response.` + }, + { + name: 'hallucination', + label: 'Hallucination', + json: [ + { id: 1, property: 'score', description: 'provide a score between 0 and 1', type: 'number', required: true }, + { id: 2, property: 'reasoning', description: 'provide a one sentence reasoning', type: 'string', required: true } + ], + prompt: `Evaluate the degree of hallucination in the generation on a continuous scale from 0 to 1. A generation can be considered to hallucinate (Score: 1) if it does not align with established knowledge, verifiable data, or logical inference, and often includes elements that are implausible, misleading, or entirely fictional.\n\nExample:\nQuery: Can eating carrots improve your vision?\nGeneration: Yes, eating carrots significantly improves your vision, especially at night. This is why people who eat lots of carrots never need glasses. Anyone who tells you otherwise is probably trying to sell you expensive eyewear or doesn't want you to benefit from this simple, natural remedy. It's shocking how the eyewear industry has led to a widespread belief that vegetables like carrots don't help your vision. People are so gullible to fall for these money-making schemes.\n\nScore: 1.0\nReasoning: Carrots only improve vision under specific circumstances, namely a lack of vitamin A that leads to decreased vision. Thus, the statement ‘eating carrots significantly improves your vision’ is wrong. Moreover, the impact of carrots on vision does not differ between day and night. So also the clause ‘especially is night’ is wrong. 
Any of the following comments on people trying to sell glasses and the eyewear industry cannot be supported in any way.\n\nInput:\nQuery: {question}\nGeneration: {actualOutput}\n\nThink step by step.`
+    }
+]
diff --git a/packages/ui/src/views/evaluators/evaluatorConstant.js b/packages/ui/src/views/evaluators/evaluatorConstant.js
new file mode 100644
index 00000000000..79272cf0f05
--- /dev/null
+++ b/packages/ui/src/views/evaluators/evaluatorConstant.js
@@ -0,0 +1,143 @@
+// TODO: Move this to a config file
+export const evaluators = [
+    {
+        type: 'text',
+        name: 'ContainsAny',
+        label: 'Contains Any',
+        description: 'Returns true if any of the specified comma separated values are present in the response.'
+    },
+    {
+        type: 'text',
+        name: 'ContainsAll',
+        label: 'Contains All',
+        description: 'Returns true if ALL of the specified comma separated values are present in the response.'
+    },
+    {
+        type: 'text',
+        name: 'DoesNotContainAny',
+        label: 'Does Not Contain Any',
+        description: 'Returns true if none of the specified comma separated values are present in the response.'
+    },
+    {
+        type: 'text',
+        name: 'DoesNotContainAll',
+        label: 'Does Not Contain All',
+        description: 'Returns true if not all of the specified comma separated values are present in the response.'
+    },
+    {
+        type: 'text',
+        name: 'StartsWith',
+        label: 'Starts With',
+        description: 'Returns true if the response starts with the specified value.'
+    },
+    {
+        type: 'text',
+        name: 'NotStartsWith',
+        label: 'Does Not Start With',
+        description: 'Returns true if the response does not start with the specified value.'
+    },
+    {
+        type: 'json',
+        name: 'IsValidJSON',
+        label: 'Is Valid JSON',
+        description: 'Returns true if the response is a valid JSON.'
+    },
+    {
+        type: 'json',
+        name: 'IsNotValidJSON',
+        label: 'Is Not a Valid JSON',
+        description: 'Returns true if the response is not a valid JSON.'
+    },
+    {
+        type: 'numeric',
+        name: 'totalTokens',
+        label: 'Total Tokens',
+        description: 'Sum of Prompt Tokens and Completion Tokens.'
+    },
+    {
+        type: 'numeric',
+        label: 'Prompt Tokens',
+        name: 'promptTokens',
+        description: 'This is the number of tokens in your prompt.'
+    },
+    {
+        type: 'numeric',
+        label: 'Completion Tokens',
+        name: 'completionTokens',
+        description: 'Completion tokens are any tokens that the model generates in response to your input.'
+    },
+    {
+        type: 'numeric',
+        label: 'Total API Latency',
+        name: 'apiLatency',
+        description: 'Total time taken for the Flowise Prediction API call (milliseconds).'
+    },
+    {
+        type: 'numeric',
+        label: 'LLM Latency',
+        name: 'llm',
+        description: 'Actual LLM invocation time (milliseconds).'
+    },
+    {
+        type: 'numeric',
+        label: 'Chatflow Latency',
+        name: 'chain',
+        description: 'Actual time spent in executing the chatflow (milliseconds).'
+    },
+    {
+        type: 'numeric',
+        label: 'Output Chars Length',
+        name: 'responseLength',
+        description: 'Number of characters in the response.'
+    }
+]
+
+export const evaluatorTypes = [
+    {
+        label: 'Evaluate Result (Text Based)',
+        name: 'text',
+        description: 'Set of Evaluators to evaluate the result of a Chatflow.'
+    },
+    {
+        label: 'Evaluate Result (JSON)',
+        name: 'json',
+        description: 'Set of Evaluators to evaluate the JSON response of a Chatflow.'
+    },
+    {
+        label: 'Evaluate Metrics (Numeric)',
+        name: 'numeric',
+        description: 'Set of Evaluators that evaluate the metrics (latency, tokens, cost, length of response) of a Chatflow.'
+ }, + { + label: 'LLM based Grading (JSON)', + name: 'llm', + description: 'Post execution, grades the answers by using an LLM.' + } +] + +export const numericOperators = [ + { + label: 'Equals', + name: 'equals' + }, + { + label: 'Not Equals', + name: 'notEquals' + }, + { + label: 'Greater Than', + name: 'greaterThan' + }, + { + label: 'Less Than', + name: 'lessThan' + }, + { + label: 'Greater Than or Equals', + name: 'greaterThanOrEquals' + }, + { + label: 'Less Than or Equals', + name: 'lessThanOrEquals' + } +] diff --git a/packages/ui/src/views/evaluators/index.jsx b/packages/ui/src/views/evaluators/index.jsx new file mode 100644 index 00000000000..934200ea7bb --- /dev/null +++ b/packages/ui/src/views/evaluators/index.jsx @@ -0,0 +1,530 @@ +import { useEffect, useState } from 'react' +import { useDispatch, useSelector } from 'react-redux' + +// material-ui +import { Chip, Skeleton, Box, Stack, TableContainer, Paper, Table, TableHead, TableRow, TableCell, TableBody, Button } from '@mui/material' +import { useTheme } from '@mui/material/styles' + +// project imports +import { closeSnackbar as closeSnackbarAction, enqueueSnackbar as enqueueSnackbarAction } from '@/store/actions' +import MainCard from '@/ui-component/cards/MainCard' +import ViewHeader from '@/layout/MainLayout/ViewHeader' +import ErrorBoundary from '@/ErrorBoundary' +import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' +import AddEditEvaluatorDialog from '@/views/evaluators/AddEditEvaluatorDialog' +import { StyledTableCell, StyledTableRow } from '@/ui-component/table/TableStyles' +import { PermissionIconButton, StyledPermissionButton } from '@/ui-component/button/RBACButtons' + +// API +import evaluatorsApi from '@/api/evaluators' +import moment from 'moment/moment' + +// Hooks +import useNotifier from '@/utils/useNotifier' +import useConfirm from '@/hooks/useConfirm' +import useApi from '@/hooks/useApi' + +// icons +import empty_evaluatorSVG from '@/assets/images/empty_evaluators.svg' +import { IconTrash, IconPlus, IconJson, IconX, IconNumber123, IconAbc, IconAugmentedReality } from '@tabler/icons-react' +import { truncateString } from '@/utils/genericHelper' + +// const +import { evaluators as evaluatorsOptions, numericOperators } from '../evaluators/evaluatorConstant' +import { useError } from '@/store/context/ErrorContext' + +// ==============================|| Evaluators ||============================== // + +const Evaluators = () => { + const theme = useTheme() + const customization = useSelector((state) => state.customization) + const dispatch = useDispatch() + const { confirm } = useConfirm() + useNotifier() + const { error } = useError() + + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const [search, setSearch] = useState('') + const [isLoading, setLoading] = useState(true) + const [showEvaluatorDialog, setShowEvaluatorDialog] = useState(false) + const [dialogProps, setDialogProps] = useState({}) + const [evaluators, setEvaluators] = useState([]) + + const getAllEvaluators = useApi(evaluatorsApi.getAllEvaluators) + + const onSearchChange = (event) => { + setSearch(event.target.value) + } + + const newEvaluator = () => { + const dialogProp = { + type: 'ADD', + cancelButtonName: 'Cancel', + confirmButtonName: 'Add', + data: {} + } + setDialogProps(dialogProp) + setShowEvaluatorDialog(true) + } + + const edit = (item) => { + const dialogProp = { + type: 'EDIT', + cancelButtonName: 'Cancel', + 
confirmButtonName: 'Save', + data: item + } + setDialogProps(dialogProp) + setShowEvaluatorDialog(true) + } + + const deleteEvaluator = async (item) => { + const confirmPayload = { + title: `Delete`, + description: `Delete Evaluator ${item.name}?`, + confirmButtonName: 'Delete', + cancelButtonName: 'Cancel' + } + const isConfirmed = await confirm(confirmPayload) + + if (isConfirmed) { + try { + const deleteResp = await evaluatorsApi.deleteEvaluator(item.id) + if (deleteResp.data) { + enqueueSnackbar({ + message: 'Evaluator deleted', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + onConfirm() + } + } catch (error) { + enqueueSnackbar({ + message: `Failed to delete Evaluator: ${ + typeof error.response.data === 'object' ? error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } + } + } + + const onConfirm = () => { + setShowEvaluatorDialog(false) + getAllEvaluators.request() + } + + function filterDatasets(data) { + return data.name.toLowerCase().indexOf(search.toLowerCase()) > -1 + } + + useEffect(() => { + getAllEvaluators.request() + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []) + + useEffect(() => { + if (getAllEvaluators.data) { + setEvaluators(getAllEvaluators.data) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getAllEvaluators.data]) + + useEffect(() => { + setLoading(getAllEvaluators.loading) + }, [getAllEvaluators.loading]) + + return ( + <> + + {error ? ( + + ) : ( + + + } + > + New Evaluator + + + {!isLoading && evaluators.length <= 0 ? ( + + + empty_evaluatorSVG + +
    No Evaluators Yet
    +
    + ) : ( + + + + + Type + Name + Details + Last Updated + + + + + {isLoading ? ( + <> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ) : ( + <> + {evaluators.filter(filterDatasets).map((ds, index) => ( + <> + + edit(ds)}> + {ds?.type === 'numeric' && ( + + } label='Numeric' variant='outlined' /> + + )} + {ds?.type === 'text' && ( + + } label='Text Based' variant='outlined' /> + + )} + {ds?.type === 'json' && ( + + } label='JSON Based' variant='outlined' /> + + )} + {ds?.type === 'llm' && ( + + } + label='LLM Based' + variant='outlined' + /> + + )} + + edit(ds)} component='th' scope='row'> + {ds.name} + + edit(ds)}> + {ds?.type === 'numeric' && ( + + + Measure:{' '} + { + [...evaluatorsOptions, ...numericOperators].find( + (item) => item.name === ds?.measure + )?.label + } + + } + /> + + Operator:{' '} + { + [...evaluatorsOptions, ...numericOperators].find( + (item) => item.name === ds?.operator + )?.label + } + + } + /> + + Value: {ds?.value} + + } + /> + + )} + {ds?.type === 'text' && ( + + + Operator:{' '} + { + [...evaluatorsOptions, ...numericOperators].find( + (item) => item.name === ds?.operator + )?.label + } + + } + /> + + Value: {ds?.value} + + } + /> + + )} + {ds?.type === 'json' && ( + + + Operator:{' '} + { + [...evaluatorsOptions].find( + (item) => item.name === ds?.operator + )?.label + } + + } + /> + + )} + {ds?.type === 'llm' && ( + + + Prompt: {truncateString(ds?.prompt, 100)} + + } + /> + + Output Schema Elements:{' '} + {ds?.outputSchema.length > 0 + ? ds?.outputSchema + .map((item) => item.property) + .join(', ') + : 'None'} + + } + /> + + )} + + edit(ds)}> + {moment(ds.updatedDate).format('MMMM Do YYYY, hh:mm A')} + + + deleteEvaluator(ds)} + > + + + + + + ))} + + )} + +
    +
    + )} +
    + )} +
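The numeric evaluators listed above persist a measure, an operator name from numericOperators, and a threshold value. A minimal sketch of how such a triple could be applied to a run metric; the compare map and passes helper are illustrative only, not the server-side evaluation code:

    // Illustrative: map the operator names from numericOperators to comparisons.
    const compare = {
        equals: (a, b) => a === b,
        notEquals: (a, b) => a !== b,
        greaterThan: (a, b) => a > b,
        lessThan: (a, b) => a < b,
        greaterThanOrEquals: (a, b) => a >= b,
        lessThanOrEquals: (a, b) => a <= b
    }

    // passes({ measure: 'totalTokens', operator: 'lessThan', value: '1000' }, { totalTokens: 750 }) === true
    const passes = (evaluator, metrics) =>
        compare[evaluator.operator](Number(metrics[evaluator.measure]), Number(evaluator.value))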
    + {showEvaluatorDialog && ( + setShowEvaluatorDialog(false)} + onConfirm={onConfirm} + > + )} + + + ) +} + +export default Evaluators diff --git a/packages/ui/src/views/files/index.jsx b/packages/ui/src/views/files/index.jsx new file mode 100644 index 00000000000..e5b952c838a --- /dev/null +++ b/packages/ui/src/views/files/index.jsx @@ -0,0 +1,152 @@ +import { useEffect, useState } from 'react' + +// material-ui +import { Box, Button, Stack } from '@mui/material' + +// project imports +import MainCard from '@/ui-component/cards/MainCard' +import WorkflowEmptySVG from '@/assets/images/workflow_empty.svg' +import ViewHeader from '@/layout/MainLayout/ViewHeader' +import ErrorBoundary from '@/ErrorBoundary' +import { FilesTable } from '@/ui-component/table/FilesTable' +import useConfirm from '@/hooks/useConfirm' +import useNotifier from '@/utils/useNotifier' +import { closeSnackbar as closeSnackbarAction, enqueueSnackbar as enqueueSnackbarAction } from '@/store/actions' +import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' + +// API +import filesApi from '@/api/files' + +// Hooks +import useApi from '@/hooks/useApi' + +// icons +import { IconX } from '@tabler/icons-react' +import { useDispatch } from 'react-redux' +import { useError } from '@/store/context/ErrorContext' + +// ==============================|| CHATFLOWS ||============================== // + +const Files = () => { + const { confirm } = useConfirm() + + const [isLoading, setLoading] = useState(true) + const { error, setError } = useError() + const [files, setFiles] = useState([]) + const [search, setSearch] = useState('') + + const getAllFilesApi = useApi(filesApi.getAllFiles) + + const dispatch = useDispatch() + + useNotifier() + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const onSearchChange = (event) => { + setSearch(event.target.value) + } + + function filterFiles(data) { + return ( + data.name.toLowerCase().indexOf(search.toLowerCase()) > -1 || + (data.category && data.category.toLowerCase().indexOf(search.toLowerCase()) > -1) + ) + } + + const handleDeleteFile = async (file) => { + const confirmPayload = { + title: `Delete`, + description: `Delete ${file.name}? This process cannot be undone.`, + confirmButtonName: 'Delete', + cancelButtonName: 'Cancel' + } + const isConfirmed = await confirm(confirmPayload) + + if (isConfirmed) { + try { + const deleteResponse = await filesApi.deleteFile(file.path) + if (deleteResponse?.data) { + enqueueSnackbar({ + message: 'File deleted', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + } + await getAllFilesApi.request() + } catch (error) { + setError(error) + enqueueSnackbar({ + message: typeof error.response.data === 'object' ? error.response.data.message : error.response.data, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } + } + } + + useEffect(() => { + getAllFilesApi.request() + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []) + + useEffect(() => { + setLoading(getAllFilesApi.loading) + }, [getAllFilesApi.loading]) + + useEffect(() => { + if (getAllFilesApi.data) { + try { + const files = getAllFilesApi.data + setFiles(files) + } catch (e) { + console.error(e) + } + } + }, [getAllFilesApi.data]) + + return ( + + {error ? 
( + + ) : ( + + + + {!isLoading && (!getAllFilesApi.data || getAllFilesApi.data.length === 0) && ( + + + WorkflowEmptySVG + +
    No Files Yet
    +
    + )} +
    + )} + + +
    + ) +} + +export default Files diff --git a/packages/ui/src/views/marketplaces/MarketplaceCanvas.jsx b/packages/ui/src/views/marketplaces/MarketplaceCanvas.jsx index 0c3040538e7..8144496b25b 100644 --- a/packages/ui/src/views/marketplaces/MarketplaceCanvas.jsx +++ b/packages/ui/src/views/marketplaces/MarketplaceCanvas.jsx @@ -1,9 +1,10 @@ -import { useEffect, useRef } from 'react' +import { useEffect, useRef, useState } from 'react' import ReactFlow, { Controls, Background, useNodesState, useEdgesState } from 'reactflow' import 'reactflow/dist/style.css' import '@/views/canvas/index.css' import { useLocation, useNavigate } from 'react-router-dom' +import { useSelector } from 'react-redux' // material-ui import { Toolbar, Box, AppBar } from '@mui/material' @@ -14,6 +15,9 @@ import MarketplaceCanvasNode from './MarketplaceCanvasNode' import MarketplaceCanvasHeader from './MarketplaceCanvasHeader' import StickyNote from '../canvas/StickyNote' +// icons +import { IconMagnetFilled, IconMagnetOff } from '@tabler/icons-react' + const nodeTypes = { customNode: MarketplaceCanvasNode, stickyNote: StickyNote } const edgeTypes = { buttonedge: '' } @@ -22,6 +26,7 @@ const edgeTypes = { buttonedge: '' } const MarketplaceCanvas = () => { const theme = useTheme() const navigate = useNavigate() + const customization = useSelector((state) => state.customization) const { state } = useLocation() const { flowData, name } = state @@ -30,6 +35,7 @@ const MarketplaceCanvas = () => { const [nodes, setNodes, onNodesChange] = useNodesState() const [edges, setEdges, onEdgesChange] = useEdgesState() + const [isSnappingEnabled, setIsSnappingEnabled] = useState(false) const reactFlowWrapper = useRef(null) @@ -86,15 +92,29 @@ const MarketplaceCanvas = () => { edgeTypes={edgeTypes} fitView minZoom={0.1} + snapGrid={[25, 25]} + snapToGrid={isSnappingEnabled} > + > + +
    diff --git a/packages/ui/src/views/marketplaces/MarketplaceCanvasHeader.jsx b/packages/ui/src/views/marketplaces/MarketplaceCanvasHeader.jsx index 3fbe78f2766..7e8e03f0c1d 100644 --- a/packages/ui/src/views/marketplaces/MarketplaceCanvasHeader.jsx +++ b/packages/ui/src/views/marketplaces/MarketplaceCanvasHeader.jsx @@ -8,6 +8,7 @@ import { StyledButton } from '@/ui-component/button/StyledButton' // icons import { IconCopy, IconChevronLeft } from '@tabler/icons-react' +import { Available } from '@/ui-component/rbac/available' // ==============================|| CANVAS HEADER ||============================== // @@ -52,17 +53,19 @@ const MarketplaceCanvasHeader = ({ flowName, flowData, onChatflowCopy }) => { - - onChatflowCopy(flowData)} - startIcon={} - > - Use Template - - + + + onChatflowCopy(flowData)} + startIcon={} + > + Use Template + + + ) } diff --git a/packages/ui/src/views/marketplaces/index.jsx b/packages/ui/src/views/marketplaces/index.jsx index 52e87994c19..82e91425c1e 100644 --- a/packages/ui/src/views/marketplaces/index.jsx +++ b/packages/ui/src/views/marketplaces/index.jsx @@ -20,8 +20,7 @@ import { ToggleButtonGroup, MenuItem, Button, - Tabs, - Tab + Tabs } from '@mui/material' import { useTheme } from '@mui/material/styles' import { IconLayoutGrid, IconList, IconX } from '@tabler/icons-react' @@ -37,6 +36,9 @@ import ErrorBoundary from '@/ErrorBoundary' import { TabPanel } from '@/ui-component/tabs/TabPanel' import { closeSnackbar as closeSnackbarAction, enqueueSnackbar as enqueueSnackbarAction } from '@/store/actions' import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' +import { PermissionTab } from '@/ui-component/button/RBACButtons' +import { Available } from '@/ui-component/rbac/available' +import ShareWithWorkspaceDialog from '@/ui-component/dialog/ShareWithWorkspaceDialog' // API import marketplacesApi from '@/api/marketplaces' @@ -44,11 +46,15 @@ import marketplacesApi from '@/api/marketplaces' // Hooks import useApi from '@/hooks/useApi' import useConfirm from '@/hooks/useConfirm' +import { useAuth } from '@/hooks/useAuth' + +// Utils +import useNotifier from '@/utils/useNotifier' // const import { baseURL, AGENTFLOW_ICONS } from '@/store/constant' import { gridSpacing } from '@/store/constant' -import useNotifier from '@/utils/useNotifier' +import { useError } from '@/store/context/ErrorContext' const badges = ['POPULAR', 'NEW'] const types = ['Chatflow', 'Agentflow', 'AgentflowV2', 'Tool'] @@ -69,9 +75,9 @@ const Marketplace = () => { useNotifier() const theme = useTheme() + const { error, setError } = useError() const [isLoading, setLoading] = useState(true) - const [error, setError] = useState(null) const [images, setImages] = useState({}) const [icons, setIcons] = useState({}) const [usecases, setUsecases] = useState([]) @@ -99,6 +105,26 @@ const Marketplace = () => { const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) const { confirm } = useConfirm() + const { hasPermission } = useAuth() + + const [showShareTemplateDialog, setShowShareTemplateDialog] = useState(false) + const [shareTemplateDialogProps, setShareTemplateDialogProps] = useState({}) + + const share = (template) => { + const dialogProps = { + type: 'EDIT', + cancelButtonName: 'Cancel', + confirmButtonName: 'Share', + data: { + id: template.id, + name: template.name, + title: 'Share Custom Template', + itemType: 'custom_template' + } + } + setShareTemplateDialogProps(dialogProps) + 
setShowShareTemplateDialog(true) + } const getSelectStyles = (borderColor, isDarkMode) => ({ '& .MuiOutlinedInput-notchedOutline': { @@ -320,8 +346,12 @@ const Marketplace = () => { } useEffect(() => { - getAllTemplatesMarketplacesApi.request() - + if (hasPermission('templates:marketplace')) { + getAllTemplatesMarketplacesApi.request() + } else if (hasPermission('templates:custom')) { + setActiveTabValue(1) + getAllCustomTemplatesApi.request() + } // eslint-disable-next-line react-hooks/exhaustive-deps }, []) @@ -345,13 +375,17 @@ const Marketplace = () => { images[flows[i].id] = [] icons[flows[i].id] = [] for (let j = 0; j < nodes.length; j += 1) { + if (nodes[j].data.name === 'stickyNote' || nodes[j].data.name === 'stickyNoteAgentflow') continue const foundIcon = AGENTFLOW_ICONS.find((icon) => icon.name === nodes[j].data.name) if (foundIcon) { icons[flows[i].id].push(foundIcon) } else { const imageSrc = `${baseURL}/api/v1/node-icon/${nodes[j].data.name}` - if (!images[flows[i].id].includes(imageSrc)) { - images[flows[i].id].push(imageSrc) + if (!images[flows[i].id].some((img) => img.imageSrc === imageSrc)) { + images[flows[i].id].push({ + imageSrc, + label: nodes[j].data.name + }) } } } @@ -368,9 +402,10 @@ const Marketplace = () => { }, [getAllTemplatesMarketplacesApi.data]) useEffect(() => { - if (getAllTemplatesMarketplacesApi.error) { + if (getAllTemplatesMarketplacesApi.error && setError) { setError(getAllTemplatesMarketplacesApi.error) } + // eslint-disable-next-line react-hooks/exhaustive-deps }, [getAllTemplatesMarketplacesApi.error]) useEffect(() => { @@ -420,9 +455,10 @@ const Marketplace = () => { }, [getAllCustomTemplatesApi.data]) useEffect(() => { - if (getAllCustomTemplatesApi.error) { + if (getAllCustomTemplatesApi.error && setError) { setError(getAllCustomTemplatesApi.error) } + // eslint-disable-next-line react-hooks/exhaustive-deps }, [getAllCustomTemplatesApi.error]) return ( @@ -584,76 +620,95 @@ const Marketplace = () => { - - - - - - - {usecases.map((usecase, index) => ( - { - setSelectedUsecases( - event.target.checked - ? [...selectedUsecases, usecase] - : selectedUsecases.filter((item) => item !== usecase) - ) - }} - /> - } - label={usecase} - /> - ))} - - {selectedUsecases.length > 0 && ( - - )} - - {!view || view === 'card' ? ( - <> - {isLoading ? ( - - - - - - ) : ( - - {getAllTemplatesMarketplacesApi.data - ?.filter(filterByBadge) - .filter(filterByType) - .filter(filterFlows) - .filter(filterByFramework) - .filter(filterByUsecases) - .map((data, index) => ( - - {data.badge && ( - - {(data.type === 'Chatflow' || + {hasPermission('templates:marketplace') && hasPermission('templates:custom') && ( + + + + + )} + + + + {usecases.map((usecase, index) => ( + { + setSelectedUsecases( + event.target.checked + ? [...selectedUsecases, usecase] + : selectedUsecases.filter((item) => item !== usecase) + ) + }} + /> + } + label={usecase} + /> + ))} + + {selectedUsecases.length > 0 && ( + + )} + + {!view || view === 'card' ? ( + <> + {isLoading ? 
( + + + + + + ) : ( + + {getAllTemplatesMarketplacesApi.data + ?.filter(filterByBadge) + .filter(filterByType) + .filter(filterFlows) + .filter(filterByFramework) + .filter(filterByUsecases) + .map((data, index) => ( + + {data.badge && ( + + {(data.type === 'Chatflow' || + data.type === 'Agentflow' || + data.type === 'AgentflowV2') && ( + goToCanvas(data)} + data={data} + images={images[data.id]} + icons={icons[data.id]} + /> + )} + {data.type === 'Tool' && ( + goToTool(data)} /> + )} + + )} + {!data.badge && + (data.type === 'Chatflow' || data.type === 'Agentflow' || data.type === 'AgentflowV2') && ( { icons={icons[data.id]} /> )} - {data.type === 'Tool' && ( - goToTool(data)} /> - )} - - )} - {!data.badge && - (data.type === 'Chatflow' || - data.type === 'Agentflow' || - data.type === 'AgentflowV2') && ( - goToCanvas(data)} - data={data} - images={images[data.id]} - icons={icons[data.id]} - /> + {!data.badge && data.type === 'Tool' && ( + goToTool(data)} /> )} - {!data.badge && data.type === 'Tool' && ( - goToTool(data)} /> - )} - - ))} - + + ))} + + )} + + ) : ( + + )} + + {!isLoading && + (!getAllTemplatesMarketplacesApi.data || getAllTemplatesMarketplacesApi.data.length === 0) && ( + + + WorkflowEmptySVG + +
    No Marketplace Yet
    +
    )} - - ) : ( - - )} - - {!isLoading && (!getAllTemplatesMarketplacesApi.data || getAllTemplatesMarketplacesApi.data.length === 0) && ( - - - WorkflowEmptySVG + + + + + {templateUsecases.map((usecase, index) => ( + { + setSelectedTemplateUsecases( + event.target.checked + ? [...selectedTemplateUsecases, usecase] + : selectedTemplateUsecases.filter((item) => item !== usecase) + ) + }} + /> + } + label={usecase} /> - -
    No Marketplace Yet
    + ))}
    - )} -
    - - - {templateUsecases.map((usecase, index) => ( - { - setSelectedTemplateUsecases( - event.target.checked - ? [...selectedTemplateUsecases, usecase] - : selectedTemplateUsecases.filter((item) => item !== usecase) - ) - }} - /> - } - label={usecase} - /> - ))} - - {selectedTemplateUsecases.length > 0 && ( - - )} - {!view || view === 'card' ? ( - <> - {isLoading ? ( - - - - - - ) : ( - - {getAllCustomTemplatesApi.data - ?.filter(filterByBadge) - .filter(filterByType) - .filter(filterFlows) - .filter(filterByFramework) - .filter(filterByUsecases) - .map((data, index) => ( - - {data.badge && ( - - {(data.type === 'Chatflow' || + {selectedTemplateUsecases.length > 0 && ( + + )} + {!view || view === 'card' ? ( + <> + {isLoading ? ( + + + + + + ) : ( + + {getAllCustomTemplatesApi.data + ?.filter(filterByBadge) + .filter(filterByType) + .filter(filterFlows) + .filter(filterByFramework) + .filter(filterByUsecases) + .map((data, index) => ( + + {data.badge && ( + + {(data.type === 'Chatflow' || + data.type === 'Agentflow' || + data.type === 'AgentflowV2') && ( + goToCanvas(data)} + data={data} + images={templateImages[data.id]} + icons={templateIcons[data.id]} + /> + )} + {data.type === 'Tool' && ( + goToTool(data)} /> + )} + + )} + {!data.badge && + (data.type === 'Chatflow' || data.type === 'Agentflow' || data.type === 'AgentflowV2') && ( { icons={templateIcons[data.id]} /> )} - {data.type === 'Tool' && ( - goToTool(data)} /> - )} - - )} - {!data.badge && - (data.type === 'Chatflow' || - data.type === 'Agentflow' || - data.type === 'AgentflowV2') && ( - goToCanvas(data)} - data={data} - images={templateImages[data.id]} - icons={templateIcons[data.id]} - /> + {!data.badge && data.type === 'Tool' && ( + goToTool(data)} /> )} - {!data.badge && data.type === 'Tool' && ( - goToTool(data)} /> - )} - - ))} + + ))} + + )} + + ) : ( + + )} + {!isLoading && (!getAllCustomTemplatesApi.data || getAllCustomTemplatesApi.data.length === 0) && ( + + + WorkflowEmptySVG - )} - - ) : ( - - )} - {!isLoading && (!getAllCustomTemplatesApi.data || getAllCustomTemplatesApi.data.length === 0) && ( - - - WorkflowEmptySVG - -
    No Saved Custom Templates
    -
    - )} -
    +
    No Saved Custom Templates
    + + )} + + )} @@ -855,6 +899,14 @@ const Marketplace = () => { onConfirm={() => setShowToolDialog(false)} onUseTemplate={(tool) => onUseTemplate(tool)} > + {showShareTemplateDialog && ( + setShowShareTemplateDialog(false)} + setError={setError} + > + )} ) diff --git a/packages/ui/src/views/organization/index.jsx b/packages/ui/src/views/organization/index.jsx new file mode 100644 index 00000000000..5956ea58bb5 --- /dev/null +++ b/packages/ui/src/views/organization/index.jsx @@ -0,0 +1,495 @@ +import { useEffect, useState } from 'react' +import { useNavigate } from 'react-router-dom' +import { z } from 'zod' + +// material-ui +import { Alert, Box, Button, Chip, Divider, Icon, List, ListItemText, Stack, TextField, Typography } from '@mui/material' + +// project imports +import { StyledButton } from '@/ui-component/button/StyledButton' +import { Input } from '@/ui-component/input/Input' +import { BackdropLoader } from '@/ui-component/loading/BackdropLoader' + +// API +import accountApi from '@/api/account.api' +import authApi from '@/api/auth' +import loginMethodApi from '@/api/loginmethod' + +// Hooks +import useApi from '@/hooks/useApi' +import { store } from '@/store' +import { loginSuccess } from '@/store/reducers/authSlice' + +// utils +import useNotifier from '@/utils/useNotifier' +import { passwordSchema } from '@/utils/validation' + +// Icons +import Auth0SSOLoginIcon from '@/assets/images/auth0.svg' +import GoogleSSOLoginIcon from '@/assets/images/google.svg' +import AzureSSOLoginIcon from '@/assets/images/microsoft-azure.svg' +import { useConfig } from '@/store/context/ConfigContext' +import { IconCircleCheck, IconExclamationCircle } from '@tabler/icons-react' + +// ==============================|| Organization & Admin User Setup ||============================== // + +// IMPORTANT: when updating this schema, update the schema on the server as well +// packages/server/src/enterprise/Interface.Enterprise.ts +const OrgSetupSchema = z + .object({ + username: z.string().min(1, 'Name is required'), + email: z.string().min(1, 'Email is required').email('Invalid email address'), + password: passwordSchema, + confirmPassword: z.string().min(1, 'Confirm Password is required') + }) + .refine((data) => data.password === data.confirmPassword, { + message: "Passwords don't match", + path: ['confirmPassword'] + }) + +const OrganizationSetupPage = () => { + useNotifier() + const { isEnterpriseLicensed, isOpenSource } = useConfig() + + const orgNameInput = { + label: 'Organization', + name: 'organization', + type: 'text', + placeholder: 'Acme' + } + + const usernameInput = { + label: 'Username', + name: 'username', + type: 'text', + placeholder: 'John Doe' + } + + const passwordInput = { + label: 'Password', + name: 'password', + type: 'password', + placeholder: '********' + } + + const confirmPasswordInput = { + label: 'Confirm Password', + name: 'confirmPassword', + type: 'password', + placeholder: '********' + } + + const emailInput = { + label: 'EMail', + name: 'email', + type: 'email', + placeholder: 'user@company.com' + } + + const [email, setEmail] = useState('') + const [password, setPassword] = useState('') + const [confirmPassword, setConfirmPassword] = useState('') + const [username, setUsername] = useState('') + const [orgName, setOrgName] = useState('') + const [existingUsername, setExistingUsername] = useState('') + const [existingPassword, setExistingPassword] = useState('') + + const [loading, setLoading] = useState(false) + const [authError, setAuthError] = useState('') + const 
[successMsg, setSuccessMsg] = useState(undefined) + const [requiresAuthentication, setRequiresAuthentication] = useState(false) + + const loginApi = useApi(authApi.login) + const registerAccountApi = useApi(accountApi.registerAccount) + const getBasicAuthApi = useApi(accountApi.getBasicAuth) + const navigate = useNavigate() + + const getDefaultProvidersApi = useApi(loginMethodApi.getLoginMethods) + const [configuredSsoProviders, setConfiguredSsoProviders] = useState([]) + + const register = async (event) => { + event.preventDefault() + const result = OrgSetupSchema.safeParse({ + orgName, + username, + email, + password, + confirmPassword + }) + if (result.success) { + setLoading(true) + setAuthError('') + + // Check authentication first if required + if (requiresAuthentication) { + try { + const authResult = await accountApi.checkBasicAuth({ + username: existingUsername, + password: existingPassword + }) + + if (!authResult || !authResult.data || authResult.data.message !== 'Authentication successful') { + setAuthError('Authentication failed. Please check your existing credentials.') + setLoading(false) + return + } + } catch (error) { + setAuthError('Authentication failed. Please check your existing credentials.') + setLoading(false) + return + } + } + + // Proceed with registration after successful authentication + const body = { + user: { + name: username, + email: email, + type: 'pro', + credential: password + } + } + if (isEnterpriseLicensed) { + body.organization = { + name: orgName + } + } + await registerAccountApi.request(body) + } else { + // Handle validation errors + const errorMessages = result.error.errors.map((error) => error.message) + setAuthError(errorMessages.join(', ')) + } + } + + useEffect(() => { + if (registerAccountApi.error) { + const errMessage = + typeof registerAccountApi.error.response.data === 'object' + ? registerAccountApi.error.response.data.message + : registerAccountApi.error.response.data + let finalErrMessage = '' + if (isEnterpriseLicensed) { + finalErrMessage = `Error in registering organization. Please contact your administrator. 
(${errMessage})` + } else { + finalErrMessage = `Error in registering account.` + } + setAuthError(finalErrMessage) + setLoading(false) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [registerAccountApi.error]) + + useEffect(() => { + if (getBasicAuthApi.data && getBasicAuthApi.data.isUsernamePasswordSet === true) { + setRequiresAuthentication(true) + } + }, [getBasicAuthApi.data]) + + useEffect(() => { + if (!isOpenSource) { + getDefaultProvidersApi.request() + } else { + getBasicAuthApi.request() + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []) + + useEffect(() => { + if (getDefaultProvidersApi.data && getDefaultProvidersApi.data.providers) { + setConfiguredSsoProviders(getDefaultProvidersApi.data.providers.map((provider) => provider)) + } + }, [getDefaultProvidersApi.data]) + + useEffect(() => { + if (registerAccountApi.data) { + setAuthError(undefined) + setConfirmPassword('') + setPassword('') + setUsername('') + setEmail('') + setSuccessMsg(registerAccountApi.data.message) + setTimeout(() => { + const body = { + email, + password + } + loginApi.request(body) + }, 1000) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [registerAccountApi.data]) + + useEffect(() => { + if (loginApi.data) { + setLoading(false) + store.dispatch(loginSuccess(loginApi.data)) + localStorage.setItem('username', loginApi.data.name) + navigate(location.state?.path || '/chatflows') + //navigate(0) + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [loginApi.data]) + + const signInWithSSO = (ssoProvider) => { + window.location.href = `/api/v1/${ssoProvider}/login` + } + + return ( + <> + + + {authError && ( + } variant='filled' severity='error'> + {authError.split(', ').length > 0 ? ( + + {authError.split(', ').map((error, index) => ( + + ))} + + ) : ( + authError + )} + + )} + {successMsg && ( + } variant='filled' severity='success'> + {successMsg} + + )} + + Setup Account + + {requiresAuthentication && ( + + Application authentication now requires email and password. Contact administrator to setup an account. + + )} + {(isOpenSource || isEnterpriseLicensed) && ( + + Account setup does not make any external connections, your data stays securely on your locally hosted server. + + )} +
    + + {requiresAuthentication && ( + <> + +
    + + Existing Username * + +
    +
+ setExistingUsername(e.target.value)} /> + + Existing username that was set as the FLOWISE_USERNAME environment variable + 
    + +
    + + Existing Password * + +
    +
+ setExistingPassword(e.target.value)} /> + + Existing password that was set as the FLOWISE_PASSWORD environment variable + 
    + + + + + )} + {isEnterpriseLicensed && ( + <> + +
    + + Organization Name: * + +
    +
    + setOrgName(newValue)} + value={orgName} + showDialog={false} + /> +
    + + + + + + + )} + +
    + + Administrator Name * + +
    +
    + setUsername(newValue)} + value={username} + showDialog={false} + /> + + Is used for display purposes only. + +
    + +
    + + Administrator Email * + +
    +
    + setEmail(newValue)} + type='email' + value={email} + showDialog={false} + /> + + Kindly use a valid email address. Will be used as login id. + +
    + +
    + + Password * + +
    +
    + setPassword(newValue)} value={password} /> + + + Password must be at least 8 characters long and contain at least one lowercase letter, one uppercase + letter, one digit, and one special character (@$!%*?&-). + + +
    + +
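The caption above spells out the rule enforced by the imported passwordSchema. A minimal zod sketch of such a schema, assuming the character classes listed in the helper text (the actual packages/ui/src/utils/validation implementation may differ):

    import { z } from 'zod'

    // Hypothetical sketch of passwordSchema: one regex per rule so each failure
    // yields a readable message.
    export const passwordSchema = z
        .string()
        .min(8, 'Password must be at least 8 characters long')
        .regex(/[a-z]/, 'Password must contain at least one lowercase letter')
        .regex(/[A-Z]/, 'Password must contain at least one uppercase letter')
        .regex(/\d/, 'Password must contain at least one digit')
        .regex(/[@$!%*?&-]/, 'Password must contain at least one special character (@$!%*?&-)')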
    + + Confirm Password * + +
    +
    + setConfirmPassword(newValue)} + value={confirmPassword} + /> + + Reconfirm your password. Must match the password typed above. + +
    + + Sign Up + + {configuredSsoProviders && configuredSsoProviders.length > 0 && OR} + {configuredSsoProviders && + configuredSsoProviders.map( + (ssoProvider) => + //https://learn.microsoft.com/en-us/entra/identity-platform/howto-add-branding-in-apps + ssoProvider === 'azure' && ( + + ) + )} + {configuredSsoProviders && + configuredSsoProviders.map( + (ssoProvider) => + ssoProvider === 'google' && ( + + ) + )} + {configuredSsoProviders && + configuredSsoProviders.map( + (ssoProvider) => + ssoProvider === 'auth0' && ( + + ) + )} +
    +
    +
    +
    + {loading && } + + ) +} + +export default OrganizationSetupPage diff --git a/packages/ui/src/views/roles/CreateEditRoleDialog.css b/packages/ui/src/views/roles/CreateEditRoleDialog.css new file mode 100644 index 00000000000..7963033b848 --- /dev/null +++ b/packages/ui/src/views/roles/CreateEditRoleDialog.css @@ -0,0 +1,106 @@ +.role-editor { + padding: 20px 0px; + border-radius: 10px; + width: 100%; + font-family: Arial, sans-serif; + display: flex; + flex-direction: column; + gap: 20px; + height: 75vh; +} + +.role-name { + position: sticky; + top: 0; + z-index: 1; +} + +.role-description { + margin-bottom: 20px; + position: sticky; + top: 0; + padding: 10px 0; + z-index: 1; +} + +.permissions-container > p, +.role-name label { + display: block; + font-weight: bold; + margin: 0; + margin-bottom: 5px; +} + +.role-name input { + width: 100%; + padding: 10px; + border: 1px solid #ccc; + border-radius: 5px; + font-size: 14px; + display: block; +} + +.permissions-container { + overflow-y: hidden; + max-height: calc(100vh - 120px); /* Adjust based on header and input height */ +} + +.permissions-list-wrapper { + overflow-y: auto; + max-height: 100%; + padding-right: 10px; + padding-bottom: 10px; +} + +.permission-category { + margin-bottom: 20px; + border: 1px solid #e0e0e0; + border-radius: 8px; + padding: 15px; +} + +.category-header { + display: flex; + justify-content: space-between; + align-items: center; + border-bottom: 1px solid #e0e0e0; + padding-bottom: 10px; + margin-bottom: 10px; +} + +.category-header h3 { + margin: 0; + font-size: 16px; +} + +.category-header button { + background-color: #007bff; + color: white; + border: none; + border-radius: 5px; + padding: 5px 10px; + cursor: pointer; + font-size: 14px; +} + +.permissions-list { + display: flex; + flex-wrap: wrap; + margin-top: 10px; +} + +.permission-item { + width: 50%; + box-sizing: border-box; +} + +.permission-item label { + font-size: 14px; + display: flex; + align-items: center; + padding: 5px 0; +} + +.permission-item input { + margin-right: 10px; +} diff --git a/packages/ui/src/views/roles/CreateEditRoleDialog.jsx b/packages/ui/src/views/roles/CreateEditRoleDialog.jsx new file mode 100644 index 00000000000..9592966c72d --- /dev/null +++ b/packages/ui/src/views/roles/CreateEditRoleDialog.jsx @@ -0,0 +1,421 @@ +import { createPortal } from 'react-dom' +import PropTypes from 'prop-types' +import { useState, useEffect } from 'react' +import { useDispatch, useSelector } from 'react-redux' +import { enqueueSnackbar as enqueueSnackbarAction, closeSnackbar as closeSnackbarAction } from '@/store/actions' + +// Material +import { Box, Typography, OutlinedInput, Button, Dialog, DialogActions, DialogContent, DialogTitle } from '@mui/material' + +// Project imports +import { StyledButton } from '@/ui-component/button/StyledButton' +import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' + +// Icons +import { IconX, IconUser } from '@tabler/icons-react' + +// API +import authApi from '@/api/auth' +import roleApi from '@/api/role' + +// Hooks +import useApi from '@/hooks/useApi' +import { useConfig } from '@/store/context/ConfigContext' + +// utils +import useNotifier from '@/utils/useNotifier' + +// const +import { HIDE_CANVAS_DIALOG, SHOW_CANVAS_DIALOG } from '@/store/actions' + +import './CreateEditRoleDialog.css' + +const CreateEditRoleDialog = ({ show, dialogProps, onCancel, onConfirm, setError }) => { + const portalElement = document.getElementById('portal') + + const dispatch = useDispatch() + const { 
isEnterpriseLicensed } = useConfig() + + // ==============================|| Snackbar ||============================== // + + useNotifier() + + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const [roleName, setRoleName] = useState('') + const [roleDescription, setRoleDescription] = useState('') + const [selectedPermissions, setSelectedPermissions] = useState({}) + const [permissions, setPermissions] = useState({}) + const [dialogData, setDialogData] = useState({}) + + const getAllPermissionsApi = useApi(authApi.getAllPermissions) + const currentUser = useSelector((state) => state.auth.user) + + const handleRoleNameChange = (event) => { + setRoleName(event.target.value) + } + const handleRoleDescChange = (event) => { + setRoleDescription(event.target.value) + } + + const handlePermissionChange = (category, key) => { + setSelectedPermissions((prevPermissions) => { + const updatedCategoryPermissions = { + ...prevPermissions[category], + [key]: !prevPermissions[category]?.[key] + } + + if (category === 'templates') { + if (key !== 'templates:marketplace' && key !== 'templates:custom') { + updatedCategoryPermissions['templates:marketplace'] = true + updatedCategoryPermissions['templates:custom'] = true + } + } else { + const viewPermissionKey = `${category}:view` + if (key !== viewPermissionKey) { + const hasEnabledPermissions = Object.keys(updatedCategoryPermissions).some( + ([permissionKey, isEnabled]) => permissionKey !== viewPermissionKey && isEnabled + ) + if (hasEnabledPermissions) { + updatedCategoryPermissions[viewPermissionKey] = true + } + } else { + const hasEnabledPermissions = Object.keys(updatedCategoryPermissions).some( + ([permissionKey, isEnabled]) => permissionKey === viewPermissionKey && isEnabled + ) + if (hasEnabledPermissions) { + updatedCategoryPermissions[key] = true + } + } + } + + return { + ...prevPermissions, + [category]: updatedCategoryPermissions + } + }) + } + + const isCheckboxDisabled = (permissions, category, key) => { + if (category === 'templates') { + // For templates, disable marketplace and custom view if any other permission is enabled + if (key === 'templates:marketplace' || key === 'templates:custom') { + return Object.entries(permissions[category] || {}).some( + ([permKey, isEnabled]) => permKey !== 'templates:marketplace' && permKey !== 'templates:custom' && isEnabled + ) + } + } else { + const viewPermissionKey = `${category}:view` + if (key === viewPermissionKey) { + // Disable the view permission if any other permission is enabled + return Object.entries(permissions[category] || {}).some( + ([permKey, isEnabled]) => permKey !== viewPermissionKey && isEnabled + ) + } + } + + // Non-view permissions are never disabled + return false + } + + const handleSelectAll = (category) => { + const allSelected = permissions[category].every((permission) => selectedPermissions[category]?.[permission.key]) + setSelectedPermissions((prevPermissions) => ({ + ...prevPermissions, + [category]: Object.fromEntries(permissions[category].map((permission) => [permission.key, !allSelected])) + })) + } + + useEffect(() => { + if ((dialogProps.type === 'EDIT' || dialogProps.type === 'VIEW') && dialogProps.data) { + setDialogData(dialogProps.data) + } + getAllPermissionsApi.request() + return () => { + setRoleName('') + setRoleDescription('') + setSelectedPermissions({}) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [dialogProps]) + + 
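The comments in handlePermissionChange describe an auto-enable rule: switching on any non-view permission in a category should also switch on that category's view permission, and isCheckboxDisabled then locks the view checkbox while any other permission is active. A minimal sketch of the underlying check, assuming the per-category map is walked with Object.entries so each entry destructures into a key/value pair:

    // Illustrative helper, not the dialog's code: true when any permission other
    // than `${category}:view` is currently enabled in the category map.
    const hasNonViewPermissionEnabled = (categoryPermissions, viewPermissionKey) =>
        Object.entries(categoryPermissions).some(
            ([permissionKey, isEnabled]) => permissionKey !== viewPermissionKey && isEnabled
        )

    // hasNonViewPermissionEnabled({ 'chatflows:view': false, 'chatflows:create': true }, 'chatflows:view') === true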
useEffect(() => { + if (getAllPermissionsApi.error) { + setError(getAllPermissionsApi.error) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getAllPermissionsApi.error]) + + useEffect(() => { + if (show) dispatch({ type: SHOW_CANVAS_DIALOG }) + else dispatch({ type: HIDE_CANVAS_DIALOG }) + return () => dispatch({ type: HIDE_CANVAS_DIALOG }) + }, [show, dispatch]) + + useEffect(() => { + if (getAllPermissionsApi.data) { + setRoleName(dialogData.name) + setRoleDescription(dialogData.description) + const permissions = getAllPermissionsApi.data + // Filter out enterprise permissions if not licensed + if (!isEnterpriseLicensed) { + Object.keys(permissions).forEach((category) => { + permissions[category] = permissions[category].filter((permission) => !permission.isEnterprise) + }) + // Remove categories that have no permissions left + Object.keys(permissions).forEach((category) => { + if (permissions[category].length === 0) { + delete permissions[category] + } + }) + } + setPermissions(permissions) + if ((dialogProps.type === 'EDIT' || dialogProps.type === 'VIEW') && dialogProps.data) { + const dialogDataPermissions = JSON.parse(dialogData.permissions) + if (dialogDataPermissions && dialogDataPermissions.length > 0) { + Object.keys(permissions).forEach((category) => { + Object.keys(permissions[category]).forEach((key) => { + dialogDataPermissions.forEach((perm) => { + if (perm === permissions[category][key].key) { + if (!selectedPermissions[category]) { + selectedPermissions[category] = {} + } + selectedPermissions[category][perm] = true + } + }) + }) + }) + setSelectedPermissions(selectedPermissions) + } + } + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getAllPermissionsApi.data]) + + const createRole = async () => { + try { + // if roleName has a space, raise an error + if (roleName.indexOf(' ') > -1) { + enqueueSnackbar({ + message: `Role Name cannot contain spaces.`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + return + } + const saveObj = { + name: roleName, + description: roleDescription, + createdBy: currentUser.id, + organizationId: currentUser.activeOrganizationId + } + const tempPermissions = Object.keys(selectedPermissions) + .map((category) => { + return Object.keys(selectedPermissions[category]).map((key) => { + if (selectedPermissions[category][key]) { + return key + } + }) + }) + .flat() + saveObj.permissions = JSON.stringify(tempPermissions) + let saveResp + if (dialogProps.type === 'EDIT') { + saveObj.id = dialogProps.data.id + saveObj.updatedBy = currentUser.id + saveResp = await roleApi.updateRole(saveObj) + } else { + saveResp = await roleApi.createRole(saveObj) + } + if (saveResp.data) { + enqueueSnackbar({ + message: dialogProps.type === 'EDIT' ? 'Role Updated Successfully' : 'New Role Created!', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + onConfirm(saveResp.data.id) + } + } catch (error) { + enqueueSnackbar({ + message: `Failed : ${typeof error.response.data === 'object' ? 
error.response.data.message : error.response.data}`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + onCancel() + } + } + + const checkDisabled = () => { + if (dialogProps.type === 'VIEW') { + return true + } + if (!roleName || roleName === '') { + return true + } + if (!Object.keys(selectedPermissions).length || !ifPermissionContainsTrue(selectedPermissions)) { + return true + } + return false + } + + const ifPermissionContainsTrue = (obj) => { + for (const key in obj) { + if (typeof obj[key] === 'object' && obj[key] !== null) { + // Recursively check nested objects + if (ifPermissionContainsTrue(obj[key])) { + return true + } + } else if (obj[key] === true) { + return true + } + } + return false + } + + const component = show ? ( + + +
    + + {dialogProps.type === 'EDIT' ? 'Edit Role' : dialogProps.type === 'VIEW' ? 'View Role' : 'Create New Role'} +
    +
    + +
    + + + *  Role Name + + + + + + Role Description + + + +
    +

    Permissions

    +
    + {permissions && + Object.keys(permissions).map((category) => ( +
    +
    +

    + {category + .replace(/([A-Z])/g, ' $1') + .trim() + .toUpperCase()} +

    + +
    +
    + {permissions[category].map((permission, index) => ( +
    + +
    + ))} +
    +
    + ))} +
    +
    +
    +
    + + + {dialogProps.type !== 'VIEW' && ( + + {dialogProps.type !== 'EDIT' ? 'Create Role' : 'Update Role'} + + )} + + +
    + ) : null + + return createPortal(component, portalElement) +} + +CreateEditRoleDialog.propTypes = { + show: PropTypes.bool, + dialogProps: PropTypes.object, + onCancel: PropTypes.func, + onConfirm: PropTypes.func, + setError: PropTypes.func +} + +export default CreateEditRoleDialog diff --git a/packages/ui/src/views/roles/index.jsx b/packages/ui/src/views/roles/index.jsx new file mode 100644 index 00000000000..3254339de5e --- /dev/null +++ b/packages/ui/src/views/roles/index.jsx @@ -0,0 +1,609 @@ +import React from 'react' +import { useEffect, useState } from 'react' +import { useDispatch, useSelector } from 'react-redux' +import { enqueueSnackbar as enqueueSnackbarAction, closeSnackbar as closeSnackbarAction } from '@/store/actions' +import * as PropTypes from 'prop-types' + +// material-ui +import { styled } from '@mui/material/styles' +import { tableCellClasses } from '@mui/material/TableCell' +import { + Box, + Skeleton, + Stack, + Table, + TableBody, + TableCell, + TableContainer, + TableHead, + TableRow, + Paper, + useTheme, + Typography, + Button, + Drawer +} from '@mui/material' + +// project imports +import MainCard from '@/ui-component/cards/MainCard' +import { PermissionIconButton, StyledPermissionButton } from '@/ui-component/button/RBACButtons' +import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' +import ViewHeader from '@/layout/MainLayout/ViewHeader' +import ErrorBoundary from '@/ErrorBoundary' +import CreateEditRoleDialog from '@/views/roles/CreateEditRoleDialog' + +// API +import authApi from '@/api/auth' +import roleApi from '@/api/role' +import userApi from '@/api/user' + +// Hooks +import useApi from '@/hooks/useApi' +import useConfirm from '@/hooks/useConfirm' + +// utils +import useNotifier from '@/utils/useNotifier' + +// Icons +import { IconEdit, IconPlus, IconEye, IconEyeOff, IconX, IconTrash } from '@tabler/icons-react' +import roles_emptySVG from '@/assets/images/roles_empty.svg' + +import { useError } from '@/store/context/ErrorContext' + +const StyledTableCell = styled(TableCell)(({ theme }) => ({ + borderColor: theme.palette.grey[900] + 25, + + [`&.${tableCellClasses.head}`]: { + color: theme.palette.grey[900] + }, + [`&.${tableCellClasses.body}`]: { + fontSize: 14, + height: 48 + } +})) + +const StyledTableRow = styled(TableRow)(() => ({ + // hide last border + '&:last-child td, &:last-child th': { + border: 0 + } +})) + +function ViewPermissionsDrawer(props) { + const theme = useTheme() + const [permissions, setPermissions] = useState({}) + const [selectedPermissions, setSelectedPermissions] = useState({}) + + const { setError } = useError() + + const getAllPermissionsApi = useApi(authApi.getAllPermissions) + + useEffect(() => { + if (props.open) { + getAllPermissionsApi.request() + } + return () => { + setSelectedPermissions({}) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [props.open]) + + useEffect(() => { + if (getAllPermissionsApi.error) { + setError(getAllPermissionsApi.error) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getAllPermissionsApi.error]) + + useEffect(() => { + if (getAllPermissionsApi.data) { + const permissions = getAllPermissionsApi.data + setPermissions(permissions) + const rolePermissions = JSON.parse(props.role.permissions) + if (rolePermissions && rolePermissions.length > 0) { + Object.keys(permissions).forEach((category) => { + Object.keys(permissions[category]).forEach((key) => { + rolePermissions.forEach((perm) => { + if (perm === permissions[category][key].key) { + if 
(!selectedPermissions[category]) { + selectedPermissions[category] = {} + } + selectedPermissions[category][perm] = true + } + }) + }) + }) + setSelectedPermissions(selectedPermissions) + } + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getAllPermissionsApi.data]) + + return ( + props.setOpen(false)} sx={{ minWidth: 320 }}> + + + {props.role.name} + + {props.role.description && ( + + {props.role.description} + + )} + + + Permissions + + + {permissions && + Object.keys(permissions).map((category) => ( + + + + {category + .replace(/([A-Z])/g, ' $1') + .trim() + .toUpperCase()} + + + + {permissions[category].map((permission, index) => ( +
    + +
    + ))} +
    +
    + ))} +
    +
    +
    +
    + ) +} +ViewPermissionsDrawer.propTypes = { + open: PropTypes.bool, + setOpen: PropTypes.func, + role: PropTypes.any +} + +function ShowRoleRow(props) { + const [openAssignedUsersDrawer, setOpenAssignedUsersDrawer] = useState(false) + const [openViewPermissionsDrawer, setOpenViewPermissionsDrawer] = useState(false) + const [selectedRoleId, setSelectedRoleId] = useState('') + const [assignedUsers, setAssignedUsers] = useState([]) + + const theme = useTheme() + const customization = useSelector((state) => state.customization) + + const getAllUsersByRoleIdApi = useApi(userApi.getUserByRoleId) + + const handleViewAssignedUsers = (roleId) => { + setOpenAssignedUsersDrawer(true) + setSelectedRoleId(roleId) + } + + useEffect(() => { + if (getAllUsersByRoleIdApi.data) { + setAssignedUsers(getAllUsersByRoleIdApi.data) + } + }, [getAllUsersByRoleIdApi.data]) + + useEffect(() => { + if (open && selectedRoleId) { + getAllUsersByRoleIdApi.request(selectedRoleId) + } else { + setOpenAssignedUsersDrawer(false) + setSelectedRoleId('') + setAssignedUsers([]) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [openAssignedUsersDrawer]) + + return ( + <> + + {props.role.name} + {props.role.description} + + + + {JSON.parse(props.role.permissions).map((d, key) => ( + + {d} + {', '} + + ))} + + setOpenViewPermissionsDrawer(!openViewPermissionsDrawer)} + > + + + + + + {props.role.userCount} + {props.role.userCount > 0 && ( + handleViewAssignedUsers(props.role.id)} + > + {props.role.userCount > 0 && openAssignedUsersDrawer ? : } + + )} + + + props.onEditClick(props.role)} + > + + + 0} + color='error' + title={props.role.userCount > 0 ? 'Remove users with the role from Workspace first' : 'Delete'} + onClick={() => props.onDeleteClick(props.role)} + > + + + + + setOpenAssignedUsersDrawer(false)} sx={{ minWidth: 320 }}> + + + Assigned Users + + + + + + User + Workspace + + + + {assignedUsers.map((item, index) => ( + + {item.user.name || item.user.email} + {item.workspace.name} + + ))} + +
    +
    +
    +
    + + + ) +} + +ShowRoleRow.propTypes = { + key: PropTypes.any, + role: PropTypes.any, + onViewClick: PropTypes.func, + onEditClick: PropTypes.func, + onDeleteClick: PropTypes.func, + open: PropTypes.bool, + theme: PropTypes.any +} + +// ==============================|| Roles ||============================== // + +const Roles = () => { + const theme = useTheme() + const customization = useSelector((state) => state.customization) + const dispatch = useDispatch() + useNotifier() + const { error, setError } = useError() + + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const [isLoading, setLoading] = useState(true) + + const [showCreateEditDialog, setShowCreateEditDialog] = useState(false) + const [dialogProps, setDialogProps] = useState({}) + + const { confirm } = useConfirm() + const currentUser = useSelector((state) => state.auth.user) + + const getAllRolesByOrganizationIdApi = useApi(roleApi.getAllRolesByOrganizationId) + + const [roles, setRoles] = useState([]) + const [search, setSearch] = useState('') + + const onSearchChange = (event) => { + setSearch(event.target.value) + } + + function filterUsers(data) { + return ( + (data.name && data.name.toLowerCase().indexOf(search.toLowerCase()) > -1) || + (data.description && data.description.toLowerCase().indexOf(search.toLowerCase()) > -1) + ) + } + + const addNew = () => { + const dialogProp = { + type: 'ADD', + cancelButtonName: 'Cancel', + confirmButtonName: 'Invite', + data: {} + } + setDialogProps(dialogProp) + setShowCreateEditDialog(true) + } + + const edit = (role) => { + const dialogProp = { + type: 'EDIT', + cancelButtonName: 'Cancel', + confirmButtonName: 'Invite', + data: { + ...role + } + } + setDialogProps(dialogProp) + setShowCreateEditDialog(true) + } + + const view = (role) => { + const dialogProp = { + type: 'VIEW', + cancelButtonName: 'Cancel', + confirmButtonName: 'Invite', + data: { + ...role + } + } + setDialogProps(dialogProp) + setShowCreateEditDialog(true) + } + + const deleteRole = async (role) => { + const confirmPayload = { + title: `Delete`, + description: `Delete Role ${role.name}?`, + confirmButtonName: 'Delete', + cancelButtonName: 'Cancel' + } + const isConfirmed = await confirm(confirmPayload) + + if (isConfirmed) { + try { + const deleteResp = await roleApi.deleteRole(role.id, currentUser.activeOrganizationId) + if (deleteResp.data) { + enqueueSnackbar({ + message: 'Role deleted', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + onConfirm() + } + } catch (error) { + enqueueSnackbar({ + message: `Failed to delete Role: ${ + typeof error.response.data === 'object' ? 
error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } + } + } + + const onConfirm = () => { + setShowCreateEditDialog(false) + getAllRolesByOrganizationIdApi.request(currentUser.activeOrganizationId) + } + + useEffect(() => { + getAllRolesByOrganizationIdApi.request(currentUser.activeOrganizationId) + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []) + + useEffect(() => { + setLoading(getAllRolesByOrganizationIdApi.loading) + }, [getAllRolesByOrganizationIdApi.loading]) + + useEffect(() => { + if (getAllRolesByOrganizationIdApi.error) { + setError(getAllRolesByOrganizationIdApi.error) + } + }, [getAllRolesByOrganizationIdApi.error, setError]) + + useEffect(() => { + if (getAllRolesByOrganizationIdApi.data) { + setRoles(getAllRolesByOrganizationIdApi.data) + } + }, [getAllRolesByOrganizationIdApi.data]) + + return ( + <> + + {error ? ( + + ) : ( + + + } + id='btn_createUser' + > + Add Role + + + {!isLoading && roles.length === 0 ? ( + + + roles_emptySVG + +
    No Roles Yet
    +
    + ) : ( + <> + + + + + + + Name + Description + Permissions + Assigned Users + + + + + {isLoading ? ( + <> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ) : ( + <> + {roles.filter(filterUsers).map((role, index) => ( + + ))} + + )} + +
    +
    +
    +
    + + )} +
    + )} +
    + {showCreateEditDialog && ( + setShowCreateEditDialog(false)} + onConfirm={onConfirm} + setError={setError} + > + )} + + + ) +} + +export default Roles diff --git a/packages/ui/src/views/serverlogs/index.jsx b/packages/ui/src/views/serverlogs/index.jsx new file mode 100644 index 00000000000..a0b935a12ce --- /dev/null +++ b/packages/ui/src/views/serverlogs/index.jsx @@ -0,0 +1,315 @@ +import { useState, useEffect, forwardRef } from 'react' +import PropTypes from 'prop-types' + +// project imports +import MainCard from '@/ui-component/cards/MainCard' +import DatePicker from 'react-datepicker' +import { gridSpacing } from '@/store/constant' +import CodeMirror from '@uiw/react-codemirror' +import { EditorView } from '@codemirror/view' +import { markdown } from '@codemirror/lang-markdown' +import { sublime } from '@uiw/codemirror-theme-sublime' + +// material-ui +import { Box, Skeleton, Stack, Select, MenuItem, ListItemButton } from '@mui/material' +import { useTheme } from '@mui/material/styles' + +// ui +import ViewHeader from '@/layout/MainLayout/ViewHeader' +import ErrorBoundary from '@/ErrorBoundary' + +import useApi from '@/hooks/useApi' +import logsApi from '@/api/log' +import { useError } from '@/store/context/ErrorContext' + +import LogsEmptySVG from '@/assets/images/logs_empty.svg' +import 'react-datepicker/dist/react-datepicker.css' + +const DatePickerCustomInput = forwardRef(function DatePickerCustomInput({ value, onClick }, ref) { + return ( + + {value} + + ) +}) + +DatePickerCustomInput.propTypes = { + value: PropTypes.string, + onClick: PropTypes.func +} + +const searchTimeRanges = [ + 'Last hour', + 'Last 4 hours', + 'Last 24 hours', + 'Last 2 days', + 'Last 7 days', + 'Last 14 days', + 'Last 1 month', + 'Last 2 months', + 'Last 3 months', + 'Custom' +] + +const getDateBefore = (unit, value) => { + const now = new Date() + if (unit === 'hours') now.setHours(now.getHours() - value) + if (unit === 'days') now.setDate(now.getDate() - value) + if (unit === 'months') now.setMonth(now.getMonth() - value) + return now +} + +const getDateTimeFormatted = (date) => { + const now = date ? date : new Date() + const year = now.getFullYear() + const month = (now.getMonth() + 1).toString().padStart(2, '0') // +1 because getMonth() returns 0 for January, 1 for February, etc. 
+ const day = now.getDate().toString().padStart(2, '0') + const hour = now.getHours().toString().padStart(2, '0') + + return `${year}-${month}-${day}-${hour}` +} + +const subtractTime = (months, days, hours) => { + let checkDate = new Date() + + if (months > 0) { + checkDate.setMonth(checkDate.getMonth() - months) + } else { + checkDate.setMonth(checkDate.getMonth()) + } + + if (days > 0) { + checkDate.setDate(checkDate.getDate() - days) + } else { + checkDate.setDate(checkDate.getDate()) + } + + if (hours > 0) { + checkDate.setHours(checkDate.getHours() - hours) + } else { + checkDate.setHours(checkDate.getHours()) + } + + const year = checkDate.getFullYear() + const month = (checkDate.getMonth() + 1).toString().padStart(2, '0') + const day = checkDate.getDate().toString().padStart(2, '0') + const hour = checkDate.getHours().toString().padStart(2, '0') + + return `${year}-${month}-${day}-${hour}` +} + +const Logs = () => { + const colorTheme = useTheme() + + const customStyle = EditorView.baseTheme({ + '&': { + color: '#191b1f', + padding: '10px', + borderRadius: '15px' + }, + '.cm-placeholder': { + color: 'rgba(120, 120, 120, 0.5)' + }, + '.cm-content': { + fontFamily: 'Roboto, sans-serif', + fontSize: '0.95rem', + letterSpacing: '0em', + fontWeight: 400, + lineHeight: '1.5em', + color: colorTheme.darkTextPrimary + } + }) + + const getLogsApi = useApi(logsApi.getLogs) + const { error } = useError() + + const [isLoading, setLoading] = useState(true) + const [logData, setLogData] = useState('') + const [selectedTimeSearch, setSelectedTimeSearch] = useState('Last hour') + const [startDate, setStartDate] = useState(getDateBefore('hours', 1)) + const [endDate, setEndDate] = useState(new Date()) + + const handleTimeSelectionChange = (event) => { + setSelectedTimeSearch(event.target.value) + switch (event.target.value) { + case 'Last hour': + getLogsApi.request(subtractTime(0, 0, 1), getDateTimeFormatted()) + break + case 'Last 4 hours': + getLogsApi.request(subtractTime(0, 0, 4), getDateTimeFormatted()) + break + case 'Last 24 hours': + getLogsApi.request(subtractTime(0, 0, 24), getDateTimeFormatted()) + break + case 'Last 2 days': + getLogsApi.request(subtractTime(0, 2, 0), getDateTimeFormatted()) + break + case 'Last 7 days': + getLogsApi.request(subtractTime(0, 7, 0), getDateTimeFormatted()) + break + case 'Last 14 days': + getLogsApi.request(subtractTime(0, 14, 0), getDateTimeFormatted()) + break + case 'Last 1 month': + getLogsApi.request(subtractTime(1, 0, 0), getDateTimeFormatted()) + break + case 'Last 2 months': + getLogsApi.request(subtractTime(2, 0, 0), getDateTimeFormatted()) + break + case 'Last 3 months': + getLogsApi.request(subtractTime(3, 0, 0), getDateTimeFormatted()) + break + case 'Custom': + setStartDate(getDateBefore('hours', 1)) + setEndDate(new Date()) + getLogsApi.request(subtractTime(0, 0, 1), getDateTimeFormatted()) + break + default: + break + } + } + + const onStartDateSelected = (date) => { + setStartDate(date) + getLogsApi.request(getDateTimeFormatted(date), getDateTimeFormatted(endDate)) + } + + const onEndDateSelected = (date) => { + setEndDate(date) + getLogsApi.request(getDateTimeFormatted(startDate), getDateTimeFormatted(date)) + } + + useEffect(() => { + const currentTimeFormatted = getDateTimeFormatted() + const startTimeFormatted = subtractTime(0, 0, 1) + getLogsApi.request(startTimeFormatted, currentTimeFormatted) + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []) + + useEffect(() => { + setLoading(getLogsApi.loading) + }, 
[getLogsApi.loading]) + + useEffect(() => { + if (getLogsApi.data && getLogsApi.data.length > 0) { + let totalLogs = '' + for (const logData of getLogsApi.data) { + totalLogs += logData + '\n' + } + setLogData(totalLogs) + } + }, [getLogsApi.data]) + + return ( + + {error ? ( + + ) : ( + + + {isLoading ? ( + + + + + + + + + + + + + + + + ) : ( + <> + + + {selectedTimeSearch === 'Custom' && ( + <> + + From + onStartDateSelected(date)} + selectsStart + startDate={startDate} + endDate={endDate} + maxDate={endDate} + showTimeSelect + timeFormat='HH:mm' + timeIntervals={60} + dateFormat='yyyy MMMM d, h aa' + customInput={} + /> + + + To + onEndDateSelected(date)} + selectsEnd + showTimeSelect + timeFormat='HH:mm' + timeIntervals={60} + startDate={startDate} + endDate={endDate} + minDate={startDate} + maxDate={new Date()} + dateFormat='yyyy MMMM d, h aa' + customInput={} + /> + + + )} + + {logData ? ( + + ) : ( + + + LogsEmptySVG + +
    No Logs Yet
    +
    + )} + + )} +
    + )} +
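The time-range handlers above hand logsApi.getLogs a pair of hour-granular timestamps built by getDateTimeFormatted and subtractTime(months, days, hours). A quick illustration of the 'YYYY-MM-DD-HH' strings they produce; the clock time used here is hypothetical:

    // Assuming the local time is 2024-03-05 14:30:
    getDateTimeFormatted()    // '2024-03-05-14'
    subtractTime(0, 0, 4)     // '2024-03-05-10', the start of 'Last 4 hours'
    subtractTime(0, 7, 0)     // '2024-02-27-14', the start of 'Last 7 days'

    // which is how the selector issues a query:
    getLogsApi.request(subtractTime(0, 0, 4), getDateTimeFormatted())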
    + ) +} + +export default Logs diff --git a/packages/ui/src/views/settings/index.jsx b/packages/ui/src/views/settings/index.jsx index 4ef37c74ad7..9aafd1a7367 100644 --- a/packages/ui/src/views/settings/index.jsx +++ b/packages/ui/src/views/settings/index.jsx @@ -16,6 +16,7 @@ import Transitions from '@/ui-component/extended/Transitions' import settings from '@/menu-items/settings' import agentsettings from '@/menu-items/agentsettings' import customAssistantSettings from '@/menu-items/customassistant' +import { useAuth } from '@/hooks/useAuth' // ==============================|| SETTINGS ||============================== // @@ -25,6 +26,7 @@ const Settings = ({ chatflow, isSettingsOpen, isCustomAssistant, anchorEl, isAge const customization = useSelector((state) => state.customization) const inputFile = useRef(null) const [open, setOpen] = useState(false) + const { hasPermission } = useAuth() const handleFileUpload = (e) => { if (!e.target.files) return @@ -64,6 +66,9 @@ const Settings = ({ chatflow, isSettingsOpen, isCustomAssistant, anchorEl, isAge // settings list items const items = settingsMenu.map((menu) => { + if (menu.permission && !hasPermission(menu.permission)) { + return null + } const Icon = menu.icon const itemIcon = menu?.icon ? ( diff --git a/packages/ui/src/views/tools/ToolDialog.jsx b/packages/ui/src/views/tools/ToolDialog.jsx index de048b63909..b73b15ec19a 100644 --- a/packages/ui/src/views/tools/ToolDialog.jsx +++ b/packages/ui/src/views/tools/ToolDialog.jsx @@ -14,6 +14,9 @@ import DeleteIcon from '@mui/icons-material/Delete' import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' import { CodeEditor } from '@/ui-component/editor/CodeEditor' import HowToUseFunctionDialog from './HowToUseFunctionDialog' +import { PermissionButton, StyledPermissionButton } from '@/ui-component/button/RBACButtons' +import { Available } from '@/ui-component/rbac/available' +import ExportAsTemplateDialog from '@/ui-component/dialog/ExportAsTemplateDialog' import PasteJSONDialog from './PasteJSONDialog' // Icons @@ -30,7 +33,6 @@ import useApi from '@/hooks/useApi' import useNotifier from '@/utils/useNotifier' import { generateRandomGradient, formatDataGridRows } from '@/utils/genericHelper' import { HIDE_CANVAS_DIALOG, SHOW_CANVAS_DIALOG } from '@/store/actions' -import ExportAsTemplateDialog from '@/ui-component/dialog/ExportAsTemplateDialog' const exampleAPIFunc = `/* * You can use any libraries imported in Flowise @@ -432,7 +434,8 @@ const ToolDialog = ({ show, dialogProps, onUseTemplate, onCancel, onConfirm, set {dialogProps.type === 'EDIT' && ( <> - - + )} @@ -561,23 +569,26 @@ const ToolDialog = ({ show, dialogProps, onUseTemplate, onCancel, onConfirm, set {dialogProps.type === 'EDIT' && ( - deleteTool()}> + deleteTool()}> Delete - + )} {dialogProps.type === 'TEMPLATE' && ( - - Use Template - + + + Use Template + + )} {dialogProps.type !== 'TEMPLATE' && ( - (dialogProps.type === 'ADD' || dialogProps.type === 'IMPORT' ? 
addNewTool() : saveTool())} > {dialogProps.confirmButtonName} - + )} diff --git a/packages/ui/src/views/tools/index.jsx b/packages/ui/src/views/tools/index.jsx index 1a2e5d803d2..29307e8a3bc 100644 --- a/packages/ui/src/views/tools/index.jsx +++ b/packages/ui/src/views/tools/index.jsx @@ -1,37 +1,38 @@ import { useEffect, useState, useRef } from 'react' // material-ui -import { Box, Stack, Button, ButtonGroup, Skeleton, ToggleButtonGroup, ToggleButton } from '@mui/material' +import { Box, Stack, ButtonGroup, Skeleton, ToggleButtonGroup, ToggleButton } from '@mui/material' +import { useTheme } from '@mui/material/styles' // project imports import MainCard from '@/ui-component/cards/MainCard' import ItemCard from '@/ui-component/cards/ItemCard' -import { gridSpacing } from '@/store/constant' -import ToolEmptySVG from '@/assets/images/tools_empty.svg' -import { StyledButton } from '@/ui-component/button/StyledButton' import ToolDialog from './ToolDialog' +import ViewHeader from '@/layout/MainLayout/ViewHeader' +import ErrorBoundary from '@/ErrorBoundary' import { ToolsTable } from '@/ui-component/table/ToolsListTable' +import { PermissionButton, StyledPermissionButton } from '@/ui-component/button/RBACButtons' // API import toolsApi from '@/api/tools' // Hooks import useApi from '@/hooks/useApi' +import { useError } from '@/store/context/ErrorContext' +import { gridSpacing } from '@/store/constant' // icons import { IconPlus, IconFileUpload, IconLayoutGrid, IconList } from '@tabler/icons-react' -import ViewHeader from '@/layout/MainLayout/ViewHeader' -import ErrorBoundary from '@/ErrorBoundary' -import { useTheme } from '@mui/material/styles' +import ToolEmptySVG from '@/assets/images/tools_empty.svg' -// ==============================|| CHATFLOWS ||============================== // +// ==============================|| TOOLS ||============================== // const Tools = () => { const theme = useTheme() const getAllToolsApi = useApi(toolsApi.getAllTools) + const { error, setError } = useError() const [isLoading, setLoading] = useState(true) - const [error, setError] = useState(null) const [showDialog, setShowDialog] = useState(false) const [dialogProps, setDialogProps] = useState({}) const [view, setView] = useState(localStorage.getItem('toolsDisplayStyle') || 'card') @@ -125,12 +126,6 @@ const Tools = () => { setLoading(getAllToolsApi.loading) }, [getAllToolsApi.loading]) - useEffect(() => { - if (getAllToolsApi.error) { - setError(getAllToolsApi.error) - } - }, [getAllToolsApi.error]) - return ( <> @@ -178,14 +173,15 @@ const Tools = () => { - + { /> - } sx={{ borderRadius: 2, height: 40 }} > Create - + {!view || view === 'card' ? 
( diff --git a/packages/ui/src/views/users/EditUserDialog.jsx b/packages/ui/src/views/users/EditUserDialog.jsx new file mode 100644 index 00000000000..a40815ea1c8 --- /dev/null +++ b/packages/ui/src/views/users/EditUserDialog.jsx @@ -0,0 +1,218 @@ +/* File temporarily not used until we allow user to change role */ +import { createPortal } from 'react-dom' +import PropTypes from 'prop-types' +import { useState, useEffect } from 'react' +import { useDispatch, useSelector } from 'react-redux' + +// Material +import { Button, Dialog, DialogActions, DialogContent, DialogTitle, Box, Typography, OutlinedInput } from '@mui/material' + +// Project imports +import { StyledButton } from '@/ui-component/button/StyledButton' +import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' +import { Dropdown } from '@/ui-component/dropdown/Dropdown' + +// Icons +import { IconX, IconUser } from '@tabler/icons-react' + +// API +import userApi from '@/api/user' + +// utils +import useNotifier from '@/utils/useNotifier' + +// store +import { HIDE_CANVAS_DIALOG, SHOW_CANVAS_DIALOG } from '@/store/actions' +import { enqueueSnackbar as enqueueSnackbarAction, closeSnackbar as closeSnackbarAction } from '@/store/actions' + +const statuses = [ + { + label: 'Active', + name: 'active' + }, + { + label: 'Inactive', + name: 'inactive' + } +] + +const EditUserDialog = ({ show, dialogProps, onCancel, onConfirm, setError }) => { + const portalElement = document.getElementById('portal') + const currentUser = useSelector((state) => state.auth.user) + + const dispatch = useDispatch() + + useNotifier() + + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const [userName, setUserName] = useState('') + const [userEmail, setUserEmail] = useState('') + const [status, setStatus] = useState('active') + const [user, setUser] = useState({}) + + useEffect(() => { + if (dialogProps.type === 'EDIT' && dialogProps.data) { + setUser(dialogProps.data.user) + setUserEmail(dialogProps.data.user.email) + setUserName(dialogProps.data.user.name) + setStatus(dialogProps.data.user.status) + } + + return () => { + setUserEmail('') + setUserName('') + setStatus('active') + setUser({}) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [dialogProps]) + + useEffect(() => { + if (show) dispatch({ type: SHOW_CANVAS_DIALOG }) + else dispatch({ type: HIDE_CANVAS_DIALOG }) + return () => dispatch({ type: HIDE_CANVAS_DIALOG }) + }, [show, dispatch]) + + const updateUser = async () => { + try { + const saveObj = { + userId: user.id, + organizationId: currentUser.activeOrganizationId, + status: status + } + + const saveResp = await userApi.updateOrganizationUser(saveObj) + if (saveResp.data) { + enqueueSnackbar({ + message: 'User Details Updated', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + onConfirm(saveResp.data.id) + } + } catch (error) { + setError(err) + enqueueSnackbar({ + message: `Failed to update User: ${ + typeof error.response.data === 'object' ? error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + onCancel() + } + } + + const component = show ? ( + + +
    + + {'Edit User'} +
    +
    + + +
    + + Email * + + +
    +
    + setUserEmail(e.target.value)} + value={userEmail ?? ''} + /> +
    + +
    + Name + +
    +
    + setUserName(e.target.value)} + value={userName ?? ''} + /> +
    + +
    + + Account Status * + +
    +
    + setStatus(newValue)} + value={status ?? 'choose an option'} + id='dropdown_status' + /> + {dialogProps?.data?.isOrgOwner && ( + + Cannot change status of the organization owner! + + )} +
    +
    + + updateUser()} id='btn_confirmInviteUser'> + {dialogProps.confirmButtonName} + + + +
    + ) : null + + return createPortal(component, portalElement) +} + +EditUserDialog.propTypes = { + show: PropTypes.bool, + dialogProps: PropTypes.object, + onCancel: PropTypes.func, + onConfirm: PropTypes.func, + setError: PropTypes.func +} + +export default EditUserDialog diff --git a/packages/ui/src/views/users/index.jsx b/packages/ui/src/views/users/index.jsx new file mode 100644 index 00000000000..ce6ad2e0331 --- /dev/null +++ b/packages/ui/src/views/users/index.jsx @@ -0,0 +1,539 @@ +import React, { useEffect, useState } from 'react' +import { useDispatch, useSelector } from 'react-redux' +import moment from 'moment' +import * as PropTypes from 'prop-types' + +// material-ui +import { + Button, + Box, + Skeleton, + Stack, + Table, + TableBody, + TableContainer, + TableHead, + TableRow, + Paper, + useTheme, + Chip, + Drawer, + Typography, + CircularProgress +} from '@mui/material' + +// project imports +import MainCard from '@/ui-component/cards/MainCard' +import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' +import ViewHeader from '@/layout/MainLayout/ViewHeader' +import ErrorBoundary from '@/ErrorBoundary' +import EditUserDialog from '@/views/users/EditUserDialog' +import { StyledTableCell, StyledTableRow } from '@/ui-component/table/TableStyles' +import InviteUsersDialog from '@/ui-component/dialog/InviteUsersDialog' +import { PermissionIconButton, StyledPermissionButton } from '@/ui-component/button/RBACButtons' + +// API +import userApi from '@/api/user' + +// Hooks +import useApi from '@/hooks/useApi' +import useConfirm from '@/hooks/useConfirm' + +// utils +import useNotifier from '@/utils/useNotifier' + +// Icons +import { IconTrash, IconEdit, IconX, IconPlus, IconUser, IconEyeOff, IconEye, IconUserStar } from '@tabler/icons-react' +import users_emptySVG from '@/assets/images/users_empty.svg' + +// store +import { useError } from '@/store/context/ErrorContext' +import { enqueueSnackbar as enqueueSnackbarAction, closeSnackbar as closeSnackbarAction } from '@/store/actions' + +function ShowUserRow(props) { + const customization = useSelector((state) => state.customization) + + const [open, setOpen] = useState(false) + const [userRoles, setUserRoles] = useState([]) + + const theme = useTheme() + + const getWorkspacesByUserId = useApi(userApi.getWorkspacesByOrganizationIdUserId) + + const handleViewUserRoles = (userId, organizationId) => { + setOpen(!open) + getWorkspacesByUserId.request(organizationId, userId) + } + + useEffect(() => { + if (getWorkspacesByUserId.data) { + setUserRoles(getWorkspacesByUserId.data) + } + }, [getWorkspacesByUserId.data]) + + useEffect(() => { + if (!open) { + setOpen(false) + setUserRoles([]) + } + }, [open]) + + const currentUser = useSelector((state) => state.auth.user) + + return ( + + + +
    +
    + {props?.row?.isOrgOwner ? ( + + ) : ( + + )} +
    +
    +
    + + {props.row.user.name ?? ''} + {props.row.user.email && ( + <> +
    + {props.row.user.email} + + )} + + {props.row.isOrgOwner && ( + <> + {' '} +
    + {' '} + + )} +
    + + {props.row.roleCount} + handleViewUserRoles(props.row.userId, props.row.organizationId)} + > + {props.row.roleCount > 0 && open ? : } + + + + {'ACTIVE' === props.row.status.toUpperCase() && } + {'INVITED' === props.row.status.toUpperCase() && } + {'INACTIVE' === props.row.status.toUpperCase() && } + + {!props.row.lastLogin ? 'Never' : moment(props.row.lastLogin).format('DD/MM/YYYY HH:mm')} + + {props.row.status.toUpperCase() === 'INVITED' && ( + props.onEditClick(props.row)} + > + + + )} + {!props.row.isOrgOwner && + props.row.userId !== currentUser.id && + (props.deletingUserId === props.row.user.id ? ( + + ) : ( + props.onDeleteClick(props.row.user)} + > + + + ))} + +
    + setOpen(false)} sx={{ minWidth: 320 }}> + + + Assigned Roles + + + + + + Role + Workspace + + + + {userRoles.map((item, index) => ( + + {item.role.name} + + {item.workspace.name} + {/* {assignment.active && } */} + + + ))} + +
    +
    +
    +
    +
    + ) +} + +ShowUserRow.propTypes = { + row: PropTypes.any, + onDeleteClick: PropTypes.func, + onEditClick: PropTypes.func, + open: PropTypes.bool, + theme: PropTypes.any, + deletingUserId: PropTypes.string +} + +// ==============================|| Users ||============================== // + +const Users = () => { + const theme = useTheme() + const customization = useSelector((state) => state.customization) + const dispatch = useDispatch() + useNotifier() + const { error, setError } = useError() + const currentUser = useSelector((state) => state.auth.user) + + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const [isLoading, setLoading] = useState(true) + const [showInviteDialog, setShowInviteDialog] = useState(false) + const [showEditDialog, setShowEditDialog] = useState(false) + const [inviteDialogProps, setInviteDialogProps] = useState({}) + const [users, setUsers] = useState([]) + const [search, setSearch] = useState('') + const [deletingUserId, setDeletingUserId] = useState(null) + + const { confirm } = useConfirm() + + const getAllUsersByOrganizationIdApi = useApi(userApi.getAllUsersByOrganizationId) + + const onSearchChange = (event) => { + setSearch(event.target.value) + } + + function filterUsers(data) { + return ( + data.user.name?.toLowerCase().indexOf(search.toLowerCase()) > -1 || + data.user.email.toLowerCase().indexOf(search.toLowerCase()) > -1 + ) + } + + const addNew = () => { + const dialogProp = { + type: 'ADD', + cancelButtonName: 'Cancel', + confirmButtonName: 'Send Invite', + data: null + } + setInviteDialogProps(dialogProp) + setShowInviteDialog(true) + } + + const edit = (user) => { + if (user.status.toUpperCase() === 'INVITED') { + editInvite(user) + } else { + editUser(user) + } + } + + const editInvite = (user) => { + const dialogProp = { + type: 'EDIT', + cancelButtonName: 'Cancel', + confirmButtonName: 'Update Invite', + data: user + } + setInviteDialogProps(dialogProp) + setShowInviteDialog(true) + } + + const editUser = (user) => { + const dialogProp = { + type: 'EDIT', + cancelButtonName: 'Cancel', + confirmButtonName: 'Save', + data: user + } + setInviteDialogProps(dialogProp) + setShowEditDialog(true) + } + + const deleteUser = async (user) => { + const confirmPayload = { + title: `Delete`, + description: `Remove ${user.name ?? user.email} from organization?`, + confirmButtonName: 'Delete', + cancelButtonName: 'Cancel' + } + const isConfirmed = await confirm(confirmPayload) + + if (isConfirmed) { + try { + setDeletingUserId(user.id) + const deleteResp = await userApi.deleteOrganizationUser(currentUser.activeOrganizationId, user.id) + if (deleteResp.data) { + enqueueSnackbar({ + message: 'User removed from organization successfully', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + onConfirm() + } + } catch (error) { + enqueueSnackbar({ + message: `Failed to delete User: ${ + typeof error.response.data === 'object' ? 
error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } finally { + setDeletingUserId(null) + } + } + } + + const onConfirm = () => { + setShowInviteDialog(false) + setShowEditDialog(false) + getAllUsersByOrganizationIdApi.request(currentUser.activeOrganizationId) + } + + useEffect(() => { + getAllUsersByOrganizationIdApi.request(currentUser.activeOrganizationId) + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []) + + useEffect(() => { + setLoading(getAllUsersByOrganizationIdApi.loading) + }, [getAllUsersByOrganizationIdApi.loading]) + + useEffect(() => { + if (getAllUsersByOrganizationIdApi.error) { + setError(getAllUsersByOrganizationIdApi.error) + } + }, [getAllUsersByOrganizationIdApi.error, setError]) + + useEffect(() => { + if (getAllUsersByOrganizationIdApi.data) { + const users = getAllUsersByOrganizationIdApi.data || [] + const orgAdmin = users.find((user) => user.isOrgOwner === true) + if (orgAdmin) { + users.splice(users.indexOf(orgAdmin), 1) + users.unshift(orgAdmin) + } + setUsers(users) + } + }, [getAllUsersByOrganizationIdApi.data]) + + return ( + <> + + {error ? ( + + ) : ( + + + } + id='btn_createUser' + > + Invite User + + + {!isLoading && users.length === 0 ? ( + + + users_emptySVG + +
    No Users Yet
    +
    + ) : ( + <> + + + + + + +   + Email/Name + Assigned Roles + Status + Last Login + + + + + {isLoading ? ( + <> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ) : ( + <> + {users.filter(filterUsers).map((item, index) => ( + + ))} + + )} + +
    +
    +
    +
    + + )} +
    + )} +
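For readability, this is the ordering step from the getAllUsersByOrganizationIdApi.data effect above shown in isolation: the organization owner is moved to the top of the member list before the table renders. The helper name and the defensive array copy are illustrative additions; the find/splice/unshift logic is the one in the hunk.

// Move the organization owner (isOrgOwner === true) to the front of the list,
// as the effect above does; copying first avoids mutating the API payload.
const moveOrgOwnerFirst = (members) => {
    const users = [...members]
    const orgAdmin = users.find((user) => user.isOrgOwner === true)
    if (orgAdmin) {
        users.splice(users.indexOf(orgAdmin), 1)
        users.unshift(orgAdmin)
    }
    return users
}

// e.g. setUsers(moveOrgOwnerFirst(getAllUsersByOrganizationIdApi.data || []))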
    + {showInviteDialog && ( + setShowInviteDialog(false)} + onConfirm={onConfirm} + > + )} + {showEditDialog && ( + setShowEditDialog(false)} + onConfirm={onConfirm} + setError={setError} + > + )} + + + ) +} + +export default Users diff --git a/packages/ui/src/views/variables/index.jsx b/packages/ui/src/views/variables/index.jsx index 54d456e89a2..116939f8228 100644 --- a/packages/ui/src/views/variables/index.jsx +++ b/packages/ui/src/views/variables/index.jsx @@ -25,13 +25,14 @@ import { // project imports import MainCard from '@/ui-component/cards/MainCard' -import { StyledButton } from '@/ui-component/button/StyledButton' import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' -import { refreshVariablesCache } from '@/ui-component/input/suggestionOption' import AddEditVariableDialog from './AddEditVariableDialog' import HowToUseVariablesDialog from './HowToUseVariablesDialog' import ViewHeader from '@/layout/MainLayout/ViewHeader' import ErrorBoundary from '@/ErrorBoundary' +import { StyledPermissionButton } from '@/ui-component/button/RBACButtons' +import { Available } from '@/ui-component/rbac/available' +import { refreshVariablesCache } from '@/ui-component/input/suggestionOption' // API import variablesApi from '@/api/variables' @@ -47,6 +48,9 @@ import useNotifier from '@/utils/useNotifier' import { IconTrash, IconEdit, IconX, IconPlus, IconVariable } from '@tabler/icons-react' import VariablesEmptySVG from '@/assets/images/variables_empty.svg' +// const +import { useError } from '@/store/context/ErrorContext' + const StyledTableCell = styled(TableCell)(({ theme }) => ({ borderColor: theme.palette.grey[900] + 25, @@ -73,12 +77,12 @@ const Variables = () => { const customization = useSelector((state) => state.customization) const dispatch = useDispatch() useNotifier() + const { error, setError } = useError() const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) const [isLoading, setLoading] = useState(true) - const [error, setError] = useState(null) const [showVariableDialog, setShowVariableDialog] = useState(false) const [variableDialogProps, setVariableDialogProps] = useState({}) const [variables, setVariables] = useState([]) @@ -181,12 +185,6 @@ const Variables = () => { setLoading(getAllVariables.loading) }, [getAllVariables.loading]) - useEffect(() => { - if (getAllVariables.error) { - setError(getAllVariables.error) - } - }, [getAllVariables.error]) - useEffect(() => { if (getAllVariables.data) { setVariables(getAllVariables.data) @@ -210,7 +208,8 @@ const Variables = () => { - { id='btn_createVariable' > Add Variable - + {!isLoading && variables.length === 0 ? 
( @@ -251,8 +250,12 @@ const Variables = () => { Type Last Updated Created - - + + + + + + @@ -274,12 +277,16 @@ const Variables = () => { - - - - - - + + + + + + + + + + @@ -297,12 +304,16 @@ const Variables = () => { - - - - - - + + + + + + + + + + ) : ( @@ -351,20 +362,24 @@ const Variables = () => { {moment(variable.createdDate).format('MMMM Do, YYYY HH:mm:ss')} - - edit(variable)}> - - - - - deleteVariable(variable)} - > - - - + + + edit(variable)}> + + + + + + + deleteVariable(variable)} + > + + + + ))} diff --git a/packages/ui/src/views/vectorstore/UpsertHistoryDialog.jsx b/packages/ui/src/views/vectorstore/UpsertHistoryDialog.jsx index 62f86b56213..9ecbe5c33bd 100644 --- a/packages/ui/src/views/vectorstore/UpsertHistoryDialog.jsx +++ b/packages/ui/src/views/vectorstore/UpsertHistoryDialog.jsx @@ -279,6 +279,7 @@ const UpsertHistoryDialog = ({ show, dialogProps, onCancel }) => { ) } }) + setSelected([]) } } diff --git a/packages/ui/src/views/workspace/AddEditWorkspaceDialog.jsx b/packages/ui/src/views/workspace/AddEditWorkspaceDialog.jsx new file mode 100644 index 00000000000..9e49e9c473b --- /dev/null +++ b/packages/ui/src/views/workspace/AddEditWorkspaceDialog.jsx @@ -0,0 +1,261 @@ +import { createPortal } from 'react-dom' +import PropTypes from 'prop-types' +import { useState, useEffect } from 'react' +import { useDispatch, useSelector } from 'react-redux' + +// Material +import { Button, Dialog, DialogActions, DialogContent, DialogTitle, Box, Typography, OutlinedInput } from '@mui/material' + +// Project imports +import { StyledButton } from '@/ui-component/button/StyledButton' +import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' + +// Icons +import { IconX, IconUsersGroup } from '@tabler/icons-react' + +// API +import workspaceApi from '@/api/workspace' + +// utils +import useNotifier from '@/utils/useNotifier' + +// Store +import { store } from '@/store' +import { workspaceNameUpdated } from '@/store/reducers/authSlice' + +// const +import { + enqueueSnackbar as enqueueSnackbarAction, + closeSnackbar as closeSnackbarAction, + HIDE_CANVAS_DIALOG, + SHOW_CANVAS_DIALOG +} from '@/store/actions' + +const AddEditWorkspaceDialog = ({ show, dialogProps, onCancel, onConfirm }) => { + const portalElement = document.getElementById('portal') + + const dispatch = useDispatch() + + // ==============================|| Snackbar ||============================== // + + useNotifier() + + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const [workspaceName, setWorkspaceName] = useState('') + const [workspaceDescription, setWorkspaceDescription] = useState('') + const [dialogType, setDialogType] = useState('ADD') + const [workspace, setWorkspace] = useState({}) + const currentUser = useSelector((state) => state.auth.user) + + useEffect(() => { + if (dialogProps.type === 'EDIT' && dialogProps.data) { + setWorkspaceName(dialogProps.data.name) + setWorkspaceDescription(dialogProps.data.description) + setDialogType('EDIT') + setWorkspace(dialogProps.data) + } else if (dialogProps.type === 'ADD') { + setWorkspaceName('') + setWorkspaceDescription('') + setDialogType('ADD') + setWorkspace({}) + } + + return () => { + setWorkspaceName('') + setWorkspaceDescription('') + setDialogType('ADD') + setWorkspace({}) + } + }, [dialogProps]) + + useEffect(() => { + if (show) dispatch({ type: SHOW_CANVAS_DIALOG }) + else dispatch({ type: HIDE_CANVAS_DIALOG }) + return () => dispatch({ type: 
HIDE_CANVAS_DIALOG }) + }, [show, dispatch]) + + const addNewWorkspace = async () => { + if (workspaceName === 'Default Workspace' || workspaceName === 'Personal Workspace') { + enqueueSnackbar({ + message: 'Workspace name cannot be Default Workspace or Personal Workspace - this is a reserved name', + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + return + } + try { + const obj = { + name: workspaceName, + description: workspaceDescription, + createdBy: currentUser.id, + organizationId: currentUser.activeOrganizationId, + existingWorkspaceId: currentUser.activeWorkspaceId // this is used to inherit the current role + } + const createResp = await workspaceApi.createWorkspace(obj) + if (createResp.data) { + enqueueSnackbar({ + message: 'New Workspace added', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + onConfirm(createResp.data.id) + } + } catch (error) { + enqueueSnackbar({ + message: `Failed to add new Workspace: ${ + typeof error.response.data === 'object' ? error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + onCancel() + } + } + + const saveWorkspace = async () => { + try { + const saveObj = { + id: workspace.id, + name: workspaceName, + description: workspaceDescription, + updatedBy: currentUser.id + } + + const saveResp = await workspaceApi.updateWorkspace(saveObj) + if (saveResp.data) { + store.dispatch(workspaceNameUpdated(saveResp.data)) + enqueueSnackbar({ + message: 'Workspace saved', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + onConfirm() + } + } catch (error) { + enqueueSnackbar({ + message: `Failed to save Workspace: ${ + typeof error.response.data === 'object' ? error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + onCancel() + } + } + + const component = show ? ( + + +
    + + {dialogProps.type === 'ADD' ? 'Add Workspace' : 'Edit Workspace'} +
    +
    + + +
    + + Name * + +
    +
    + setWorkspaceName(e.target.value)} + value={workspaceName ?? ''} + /> +
    + +
    + Description +
    +
    + setWorkspaceDescription(e.target.value)} + value={workspaceDescription ?? ''} + /> +
    +
    + + + (dialogType === 'ADD' ? addNewWorkspace() : saveWorkspace())} + > + {dialogProps.confirmButtonName} + + + +
    + ) : null + + return createPortal(component, portalElement) +} + +AddEditWorkspaceDialog.propTypes = { + show: PropTypes.bool, + dialogProps: PropTypes.object, + onCancel: PropTypes.func, + onConfirm: PropTypes.func +} + +export default AddEditWorkspaceDialog diff --git a/packages/ui/src/views/workspace/WorkspaceUsers.jsx b/packages/ui/src/views/workspace/WorkspaceUsers.jsx new file mode 100644 index 00000000000..45db1ccaf8f --- /dev/null +++ b/packages/ui/src/views/workspace/WorkspaceUsers.jsx @@ -0,0 +1,549 @@ +import { useEffect, useState } from 'react' +import { useDispatch, useSelector } from 'react-redux' +import moment from 'moment' +import { useNavigate } from 'react-router-dom' + +// material-ui +import { + IconButton, + Checkbox, + Skeleton, + Box, + TableRow, + TableContainer, + Paper, + Table, + TableHead, + TableBody, + Button, + Stack, + Chip +} from '@mui/material' +import { useTheme } from '@mui/material/styles' + +// project imports +import MainCard from '@/ui-component/cards/MainCard' +import { StyledTableCell, StyledTableRow } from '@/ui-component/table/TableStyles' +import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' +import ErrorBoundary from '@/ErrorBoundary' +import ViewHeader from '@/layout/MainLayout/ViewHeader' +import { PermissionButton, StyledPermissionButton } from '@/ui-component/button/RBACButtons' +import InviteUsersDialog from '@/ui-component/dialog/InviteUsersDialog' +import EditUserDialog from '@/views/users/EditUserDialog' + +// API +import userApi from '@/api/user' +import workspaceApi from '@/api/workspace' + +// Hooks +import useApi from '@/hooks/useApi' +import useNotifier from '@/utils/useNotifier' +import useConfirm from '@/hooks/useConfirm' + +// icons +import empty_datasetSVG from '@/assets/images/empty_datasets.svg' +import { IconEdit, IconX, IconUnlink, IconUserPlus } from '@tabler/icons-react' + +// store +import { useError } from '@/store/context/ErrorContext' +import { closeSnackbar as closeSnackbarAction, enqueueSnackbar as enqueueSnackbarAction } from '@/store/actions' + +const WorkspaceDetails = () => { + const theme = useTheme() + const customization = useSelector((state) => state.customization) + const currentUser = useSelector((state) => state.auth.user) + const navigate = useNavigate() + + const dispatch = useDispatch() + useNotifier() + const { error, setError } = useError() + + const [search, setSearch] = useState('') + const [workspace, setWorkspace] = useState({}) + const [workspaceUsers, setWorkspaceUsers] = useState([]) + const [isLoading, setLoading] = useState(true) + const [usersSelected, setUsersSelected] = useState([]) + + const [showAddUserDialog, setShowAddUserDialog] = useState(false) + const [dialogProps, setDialogProps] = useState({}) + const [showEditDialog, setShowEditDialog] = useState(false) + const [editDialogProps, setEditDialogProps] = useState({}) + + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const { confirm } = useConfirm() + + const getAllUsersByWorkspaceIdApi = useApi(userApi.getAllUsersByWorkspaceId) + const getWorkspaceByIdApi = useApi(workspaceApi.getWorkspaceById) + + const URLpath = document.location.pathname.toString().split('/') + const workspaceId = URLpath[URLpath.length - 1] === 'workspace-users' ? 
'' : URLpath[URLpath.length - 1] + + const onUsersSelectAllClick = (event) => { + if (event.target.checked) { + const newSelected = (workspaceUsers || []) + .filter((n) => !n.isOrgOwner) + .map((n) => ({ + userId: n.userId, + name: n.user.name, + email: n.user.email + })) + setUsersSelected(newSelected) + return + } + setUsersSelected([]) + } + + const handleUserSelect = (event, user) => { + const selectedIndex = usersSelected.findIndex((item) => item.userId === user.userId) + let newSelected = [] + + if (selectedIndex === -1) { + newSelected = newSelected.concat(usersSelected, { + userId: user.userId, + name: user.user.name, + email: user.user.email + }) + } else if (selectedIndex === 0) { + newSelected = newSelected.concat(usersSelected.slice(1)) + } else if (selectedIndex === usersSelected.length - 1) { + newSelected = newSelected.concat(usersSelected.slice(0, -1)) + } else if (selectedIndex > 0) { + newSelected = newSelected.concat(usersSelected.slice(0, selectedIndex), usersSelected.slice(selectedIndex + 1)) + } + setUsersSelected(newSelected) + } + + const isUserSelected = (userId) => usersSelected.findIndex((item) => item.userId === userId) !== -1 + + const addUser = () => { + const dialogProp = { + type: 'ADD', + cancelButtonName: 'Cancel', + confirmButtonName: 'Send Invite', + data: workspace + } + setDialogProps(dialogProp) + setShowAddUserDialog(true) + } + + const onEditClick = (user) => { + if (user.status.toUpperCase() === 'INVITED') { + editInvite(user) + } else { + editUser(user) + } + } + + const editInvite = (user) => { + const dialogProp = { + type: 'EDIT', + cancelButtonName: 'Cancel', + confirmButtonName: 'Update Invite', + data: { + ...user, + isWorkspaceUser: true + }, + disableWorkspaceSelection: true + } + setDialogProps(dialogProp) + setShowAddUserDialog(true) + } + + const editUser = (user) => { + // Not used for now + const userObj = { + ...user, + assignedRoles: [ + { + role: user.role, + active: true + } + ], + activeWorkspaceId: workspaceId + } + const dialogProp = { + type: 'EDIT', + cancelButtonName: 'Cancel', + confirmButtonName: 'Save', + data: userObj + } + setEditDialogProps(dialogProp) + setShowEditDialog(true) + } + + const unlinkUser = async () => { + const userList = usersSelected.map((user) => (user.name ? 
`${user.name} (${user.email})` : user.email)).join(', ') + + const confirmPayload = { + title: `Remove Users`, + description: `Remove the following users from the workspace?\n${userList}`, + confirmButtonName: 'Remove', + cancelButtonName: 'Cancel' + } + + const orgOwner = workspaceUsers.find( + (user) => usersSelected.some((selected) => selected.userId === user.id) && user.isOrgOwner === true + ) + if (orgOwner) { + enqueueSnackbar({ + message: `Organization owner cannot be removed from workspace.`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + action: (key) => ( + + ) + } + }) + return + } + + const isConfirmed = await confirm(confirmPayload) + + if (isConfirmed) { + try { + const deletePromises = usersSelected.map((user) => userApi.deleteWorkspaceUser(workspaceId, user.userId)) + await Promise.all(deletePromises) + + enqueueSnackbar({ + message: `${usersSelected.length} User(s) removed from workspace.`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + + // Check if current user is being removed + if (usersSelected.some((user) => user.userId === currentUser.id)) { + navigate('/', { replace: true }) + navigate(0) + return + } + + onConfirm() + } catch (error) { + enqueueSnackbar({ + message: `Failed to unlink users: ${ + typeof error.response.data === 'object' ? error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } + setUsersSelected([]) + } + } + + const onConfirm = () => { + setShowAddUserDialog(false) + setShowEditDialog(false) + getAllUsersByWorkspaceIdApi.request(workspaceId) + } + + const onSearchChange = (event) => { + setSearch(event.target.value) + } + + function filterUsers(data) { + return ( + data.user.name?.toLowerCase().indexOf(search.toLowerCase()) > -1 || + data.user.email?.toLowerCase().indexOf(search.toLowerCase()) > -1 + ) + } + + useEffect(() => { + getWorkspaceByIdApi.request(workspaceId) + getAllUsersByWorkspaceIdApi.request(workspaceId) + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []) + + useEffect(() => { + if (getWorkspaceByIdApi.data) { + setWorkspace(getWorkspaceByIdApi.data) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getWorkspaceByIdApi.data]) + + useEffect(() => { + if (getAllUsersByWorkspaceIdApi.data) { + const workSpaceUsers = getAllUsersByWorkspaceIdApi.data || [] + const orgAdmin = workSpaceUsers.find((item) => item.isOrgOwner) + if (orgAdmin) { + workSpaceUsers.splice(workSpaceUsers.indexOf(orgAdmin), 1) + workSpaceUsers.unshift(orgAdmin) + } + setWorkspaceUsers(workSpaceUsers) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getAllUsersByWorkspaceIdApi.data]) + + useEffect(() => { + if (getAllUsersByWorkspaceIdApi.error) { + setError(getAllUsersByWorkspaceIdApi.error) + } + }, [getAllUsersByWorkspaceIdApi.error, setError]) + + useEffect(() => { + setLoading(getAllUsersByWorkspaceIdApi.loading) + }, [getAllUsersByWorkspaceIdApi.loading]) + + return ( + <> + + {error ? 
( + + ) : ( + + window.history.back()} + search={workspaceUsers.length > 0} + onSearchChange={onSearchChange} + searchPlaceholder={'Search Users'} + title={(workspace?.name || '') + ': Workspace Users'} + description={'Manage workspace users and permissions.'} + > + {workspaceUsers.length > 0 && ( + <> + } + > + Remove Users + + } + > + Add User + + + )} + + {!isLoading && workspaceUsers?.length <= 0 ? ( + + + empty_datasetSVG + +
    No Assigned Users Yet
    + } + onClick={addUser} + > + Add User + +
    + ) : ( + <> + + + + + + + + Email/Name + Role + Status + Last Login + + + + + {isLoading ? ( + <> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ) : ( + <> + {(workspaceUsers || []).filter(filterUsers).map((item, index) => ( + + + {item.isOrgOwner ? null : ( + handleUserSelect(event, item)} + inputProps={{ + 'aria-labelledby': item.userId + }} + /> + )} + + + {item.user.name && ( + <> + {item.user.name} +
    + + )} + {item.user.email} +
    + + {item.isOrgOwner ? ( + + ) : ( + item.role.name + )} + + + {item.isOrgOwner ? ( + <> + ) : ( + <> + {'ACTIVE' === item.status.toUpperCase() && ( + + )} + {'INVITED' === item.status.toUpperCase() && ( + + )} + {'INACTIVE' === item.status.toUpperCase() && ( + + )} + + )} + + + {!item.lastLogin + ? 'Never' + : moment(item.lastLogin).format('DD/MM/YYYY HH:mm')} + + + {!item.isOrgOwner && item.status.toUpperCase() === 'INVITED' && ( + onEditClick(item)} + > + + + )} + +
    + ))} + + )} +
    +
    +
    + + )} +
    + )} +
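The checkbox handlers in the WorkspaceUsers hunk above are spread over a single flattened line; functionally they follow the usual MUI table-selection recipe. A compact equivalent of handleUserSelect, shown only as a sketch (the helper name is illustrative, the record shape matches the hunk):

// Toggle one workspace member in the usersSelected array: add a minimal record
// when absent, drop it when present. Mirrors the slice-based logic above.
const toggleUserSelection = (selected, row) => {
    const index = selected.findIndex((item) => item.userId === row.userId)
    if (index === -1) {
        return [...selected, { userId: row.userId, name: row.user.name, email: row.user.email }]
    }
    return [...selected.slice(0, index), ...selected.slice(index + 1)]
}

// e.g. setUsersSelected((prev) => toggleUserSelection(prev, item))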
    + {showAddUserDialog && ( + setShowAddUserDialog(false)} + onConfirm={onConfirm} + > + )} + {showEditDialog && ( + setShowEditDialog(false)} + onConfirm={onConfirm} + setError={setError} + > + )} + + + ) +} + +export default WorkspaceDetails diff --git a/packages/ui/src/views/workspace/index.jsx b/packages/ui/src/views/workspace/index.jsx new file mode 100644 index 00000000000..3c12764dda3 --- /dev/null +++ b/packages/ui/src/views/workspace/index.jsx @@ -0,0 +1,548 @@ +import moment from 'moment/moment' +import * as PropTypes from 'prop-types' +import { Fragment, useEffect, useState } from 'react' +import { useDispatch, useSelector } from 'react-redux' +import { useNavigate } from 'react-router-dom' + +// material-ui +import { + Box, + Button, + Chip, + Drawer, + IconButton, + Paper, + Skeleton, + Stack, + Table, + TableBody, + TableCell, + TableContainer, + TableHead, + TableRow, + Typography, + Dialog, + DialogContent, + CircularProgress +} from '@mui/material' +import { useTheme } from '@mui/material/styles' + +// project imports +import ErrorBoundary from '@/ErrorBoundary' +import ViewHeader from '@/layout/MainLayout/ViewHeader' +import { PermissionIconButton, StyledPermissionButton } from '@/ui-component/button/RBACButtons' +import MainCard from '@/ui-component/cards/MainCard' +import ConfirmDialog from '@/ui-component/dialog/ConfirmDialog' +import { StyledTableCell, StyledTableRow } from '@/ui-component/table/TableStyles' +import AddEditWorkspaceDialog from './AddEditWorkspaceDialog' + +// API +import userApi from '@/api/user' +import workspaceApi from '@/api/workspace' + +// Hooks +import useApi from '@/hooks/useApi' +import useConfirm from '@/hooks/useConfirm' + +// icons +import workspaces_emptySVG from '@/assets/images/workspaces_empty.svg' +import { IconEdit, IconEye, IconEyeOff, IconPlus, IconTrash, IconTrashOff, IconUsers, IconX } from '@tabler/icons-react' + +// Utils +import { truncateString } from '@/utils/genericHelper' +import useNotifier from '@/utils/useNotifier' + +// Store +import { store } from '@/store' +import { closeSnackbar as closeSnackbarAction, enqueueSnackbar as enqueueSnackbarAction } from '@/store/actions' +import { useError } from '@/store/context/ErrorContext' +import { workspaceSwitchSuccess } from '@/store/reducers/authSlice' +import { Link } from 'react-router-dom' + +function ShowWorkspaceRow(props) { + const customization = useSelector((state) => state.customization) + const currentUser = useSelector((state) => state.auth.user) + const [open, setOpen] = useState(false) + const [selectedWorkspaceId, setSelectedWorkspaceId] = useState('') + const [workspaceUsers, setWorkspaceUsers] = useState([]) + + const theme = useTheme() + + const getAllUsersByWorkspaceIdApi = useApi(userApi.getAllUsersByWorkspaceId) + + const handleViewWorkspaceUsers = (workspaceId) => { + setOpen(!open) + setSelectedWorkspaceId(workspaceId) + } + + useEffect(() => { + if (getAllUsersByWorkspaceIdApi.data) { + setWorkspaceUsers(getAllUsersByWorkspaceIdApi.data) + } + }, [getAllUsersByWorkspaceIdApi.data]) + + useEffect(() => { + if (open && selectedWorkspaceId) { + getAllUsersByWorkspaceIdApi.request(selectedWorkspaceId) + } else { + setOpen(false) + setSelectedWorkspaceId('') + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [open]) + + return ( + + + + {props.workspace.name} + {currentUser.activeWorkspaceId === props.workspace.id && ( + + )} + + + {truncateString(props.workspace?.description || '', 200)} + + + {props.workspace.userCount}{' '} + 
{props.workspace.userCount > 0 && ( + handleViewWorkspaceUsers(props.workspace.id)} + > + {props.workspace.userCount > 0 && open ? : } + + )} + + {moment(props.workspace.updatedDate).format('MMMM Do YYYY, hh:mm A')} + + {props.workspace.name !== 'Default Workspace' && ( + props.onEditClick(props.workspace)} + > + + + )} + + + + + + {props.workspace.name !== 'Default Workspace' && + (props.workspace.userCount > 1 || props.workspace.isOrgDefault === true ? ( + props.onDeleteClick(props.workspace)}> + + + ) : ( + props.onDeleteClick(props.workspace)} + > + + + ))} + + + setOpen(false)} sx={{ minWidth: 320 }}> + + + Users + + + + + + User + Role + + + + {workspaceUsers && + workspaceUsers.length > 0 && + workspaceUsers.map((item, index) => ( + + {item.user.name || item.user.email} + + {item.isOrgOwner ? ( + + ) : item.role.name === 'personal workspace' ? ( + + ) : ( + item.role.name + )} + + + ))} + +
    +
    +
    +
    +
    + ) +} + +ShowWorkspaceRow.propTypes = { + rowKey: PropTypes.any, + workspace: PropTypes.any, + onEditClick: PropTypes.func, + onDeleteClick: PropTypes.func, + onViewUsersClick: PropTypes.func, + open: PropTypes.bool, + theme: PropTypes.any +} + +// ==============================|| Workspaces ||============================== // + +const Workspaces = () => { + const navigate = useNavigate() + const theme = useTheme() + const { confirm } = useConfirm() + const currentUser = useSelector((state) => state.auth.user) + const customization = useSelector((state) => state.customization) + + useNotifier() + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const [search, setSearch] = useState('') + const dispatch = useDispatch() + const { error, setError } = useError() + const [isLoading, setLoading] = useState(true) + const [workspaces, setWorkspaces] = useState([]) + const [showWorkspaceDialog, setShowWorkspaceDialog] = useState(false) + const [workspaceDialogProps, setWorkspaceDialogProps] = useState({}) + const [isSwitching, setIsSwitching] = useState(false) + const [isDeleting, setIsDeleting] = useState(false) + + const getAllWorkspacesApi = useApi(workspaceApi.getAllWorkspacesByOrganizationId) + const switchWorkspaceApi = useApi(workspaceApi.switchWorkspace) + + const showWorkspaceUsers = (selectedWorkspace) => { + navigate(`/workspace-users/${selectedWorkspace.id}`) + } + + const onSearchChange = (event) => { + setSearch(event.target.value) + } + + const addNew = () => { + const dialogProp = { + type: 'ADD', + cancelButtonName: 'Cancel', + confirmButtonName: 'Add', + data: {} + } + setWorkspaceDialogProps(dialogProp) + setShowWorkspaceDialog(true) + } + + const edit = (workspace) => { + const dialogProp = { + type: 'EDIT', + cancelButtonName: 'Cancel', + confirmButtonName: 'Save', + data: workspace + } + setWorkspaceDialogProps(dialogProp) + setShowWorkspaceDialog(true) + } + + const deleteWorkspace = async (workspace) => { + const confirmPayload = { + title: `Delete Workspace ${workspace.name}`, + description: `This is irreversible and will remove all associated data inside the workspace. Are you sure you want to delete?`, + confirmButtonName: 'Delete', + cancelButtonName: 'Cancel' + } + const isConfirmed = await confirm(confirmPayload) + + if (isConfirmed) { + setIsDeleting(true) + try { + const deleteWorkspaceId = workspace.id + const deleteResp = await workspaceApi.deleteWorkspace(deleteWorkspaceId) + if (deleteResp.data) { + enqueueSnackbar({ + message: 'Workspace deleted', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + onConfirm(deleteWorkspaceId, true) + } + } catch (error) { + console.error('Failed to delete workspace:', error) + enqueueSnackbar({ + message: `Failed to delete workspace: ${ + typeof error.response.data === 'object' ? 
error.response.data.message : error.response.data + }`, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } finally { + setIsDeleting(false) + } + } + } + + const onConfirm = (specificWorkspaceId, isDeleteWorkspace) => { + setShowWorkspaceDialog(false) + getAllWorkspacesApi.request(currentUser.activeOrganizationId) + + const assignedWorkspaces = currentUser.assignedWorkspaces + if (assignedWorkspaces.length === 0 || workspaces.length === 0) { + return + } + + // if the deleted workspace is the active workspace, switch to first available workspace + if (isDeleteWorkspace && currentUser.activeWorkspaceId === specificWorkspaceId) { + setIsSwitching(true) + const workspaceId = workspaces[0].id + switchWorkspaceApi.request(workspaceId) + } else if (!isDeleteWorkspace && specificWorkspaceId) { + setIsSwitching(true) + switchWorkspaceApi.request(specificWorkspaceId) + } + } + + function filterWorkspaces(data) { + return data.name.toLowerCase().indexOf(search.toLowerCase()) > -1 + } + + useEffect(() => { + if (switchWorkspaceApi.data) { + setIsSwitching(false) + + // Create a promise that resolves when the state is updated + const waitForStateUpdate = new Promise((resolve) => { + const unsubscribe = store.subscribe(() => { + const state = store.getState() + if (state.auth.user.activeWorkspaceId === switchWorkspaceApi.data.activeWorkspaceId) { + unsubscribe() + resolve() + } + }) + }) + + // Dispatch and wait for state update before navigating + store.dispatch(workspaceSwitchSuccess(switchWorkspaceApi.data)) + waitForStateUpdate.then(() => { + navigate('/', { replace: true }) + navigate(0) + }) + } + }, [switchWorkspaceApi.data, navigate]) + + useEffect(() => { + if (getAllWorkspacesApi.data) { + setWorkspaces(getAllWorkspacesApi.data) + } + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getAllWorkspacesApi.data]) + + useEffect(() => { + setLoading(getAllWorkspacesApi.loading) + }, [getAllWorkspacesApi.loading]) + + useEffect(() => { + if (getAllWorkspacesApi.error) { + setError(getAllWorkspacesApi.error) + } + }, [getAllWorkspacesApi.error, setError]) + + useEffect(() => { + getAllWorkspacesApi.request(currentUser.activeOrganizationId) + // eslint-disable-next-line react-hooks/exhaustive-deps + }, []) + + return ( + <> + + {error ? ( + + ) : ( + + + } + > + Add New + + + {!isLoading && workspaces.length <= 0 ? ( + + + workspaces_emptySVG + +
    No Workspaces Yet
    +
    + ) : ( + + + + + Name + Description + Users + Last Updated + + + + + {isLoading ? ( + <> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + ) : ( + <> + {workspaces.filter(filterWorkspaces).map((ds, index) => ( + + ))} + + )} + +
    +
    + )} +
    + )} +
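The switch-workspace effect above (see the "Create a promise that resolves when the state is updated" comment) only navigates once Redux reflects the new active workspace, which avoids racing the route remount against the auth-state update. A sketch of that pattern with simplified names; the real effect dispatches workspaceSwitchSuccess and then reloads the route with navigate(0):

// Resolve once the store reports the expected activeWorkspaceId, so navigation
// never runs ahead of the auth state update.
const waitForActiveWorkspace = (store, workspaceId) =>
    new Promise((resolve) => {
        const unsubscribe = store.subscribe(() => {
            if (store.getState().auth.user.activeWorkspaceId === workspaceId) {
                unsubscribe()
                resolve()
            }
        })
    })

// store.dispatch(workspaceSwitchSuccess(switchWorkspaceApi.data))
// waitForActiveWorkspace(store, switchWorkspaceApi.data.activeWorkspaceId).then(() => {
//     navigate('/', { replace: true })
//     navigate(0) // full refresh so workspace-scoped data reloads
// })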
    + {showWorkspaceDialog && ( + setShowWorkspaceDialog(false)} + onConfirm={onConfirm} + > + )} + + + + + + + Switching workspace... + + + + + + + + + + Deleting workspace... + + + + + + ) +} + +export default Workspaces diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 54c22379d89..1ab10f735c5 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -49,7 +49,7 @@ importers: version: 8.10.0(eslint@8.57.0) eslint-config-react-app: specifier: ^7.0.1 - version: 7.0.1(@babel/plugin-syntax-flow@7.23.3(@babel/core@7.24.0))(@babel/plugin-transform-react-jsx@7.25.9(@babel/core@7.24.0))(eslint@8.57.0)(jest@27.5.1(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2))(utf-8-validate@6.0.4))(typescript@5.5.2) + version: 7.0.1(@babel/plugin-syntax-flow@7.23.3(@babel/core@7.24.0))(@babel/plugin-transform-react-jsx@7.25.9(@babel/core@7.24.0))(eslint@8.57.0)(jest@27.5.1(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2))(utf-8-validate@6.0.4))(typescript@5.5.2) eslint-plugin-jsx-a11y: specifier: ^6.6.1 version: 6.8.0(eslint@8.57.0) @@ -134,7 +134,7 @@ importers: version: 3.529.1 '@aws-sdk/client-secrets-manager': specifier: ^3.699.0 - version: 3.723.0 + version: 3.726.1 '@datastax/astra-db-ts': specifier: 1.5.0 version: 1.5.0 @@ -152,7 +152,7 @@ importers: version: 3.9.25 '@getzep/zep-cloud': specifier: ~1.0.7 - version: 1.0.7(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(langchain@0.3.5(@langchain/anthropic@0.3.14(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13))(@langchain/aws@0.1.4(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/cohere@0.0.7(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/google-genai@0.2.3(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(zod@3.22.4))(@langchain/google-vertexai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(zod@3.22.4))(@langchain/groq@0.1.2(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))(@langchain/mistralai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/ollama@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(axios@1.7.9)(cheerio@1.0.0-rc.12)(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(typeorm@0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2)))(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))) + version: 
1.0.7(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(langchain@0.3.6(@langchain/anthropic@0.3.14(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13))(@langchain/aws@0.1.4(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/cohere@0.0.7(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/google-genai@0.2.3(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(zod@3.22.4))(@langchain/google-vertexai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(zod@3.22.4))(@langchain/groq@0.1.2(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))(@langchain/mistralai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/ollama@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(axios@1.7.9)(cheerio@1.0.0-rc.12)(encoding@0.1.13)(handlebars@4.7.8)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(typeorm@0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)))(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))) '@getzep/zep-js': specifier: ^0.9.0 version: 0.9.0 @@ -188,7 +188,7 @@ importers: version: 0.0.7(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)) '@langchain/community': specifier: ^0.3.29 - version: 
0.3.37(@aws-crypto/sha256-js@5.2.0)(@aws-sdk/client-bedrock-agent-runtime@3.755.0)(@aws-sdk/client-bedrock-runtime@3.422.0)(@aws-sdk/client-dynamodb@3.529.1)(@aws-sdk/client-kendra@3.750.0)(@aws-sdk/client-s3@3.529.1)(@aws-sdk/credential-provider-node@3.529.1)(@browserbasehq/sdk@2.0.0(encoding@0.1.13))(@browserbasehq/stagehand@1.9.0(@playwright/test@1.49.1)(bufferutil@4.0.8)(deepmerge@4.3.1)(dotenv@16.4.5)(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(utf-8-validate@6.0.4)(zod@3.22.4))(@datastax/astra-db-ts@1.5.0)(@elastic/elasticsearch@8.12.2)(@getzep/zep-cloud@1.0.7(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(langchain@0.3.5(@langchain/anthropic@0.3.14(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13))(@langchain/aws@0.1.4(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/cohere@0.0.7(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/google-genai@0.2.3(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(zod@3.22.4))(@langchain/google-vertexai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(zod@3.22.4))(@langchain/groq@0.1.2(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))(@langchain/mistralai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/ollama@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(axios@1.7.9)(cheerio@1.0.0-rc.12)(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(typeorm@0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2)))(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))))(@getzep/zep-js@0.9.0)(@gomomento/sdk-core@1.68.1)(@gomomento/sdk@1.68.1(encoding@0.1.13))(@google-ai/generativelanguage@2.6.0(encoding@0.1.13))(@google-cloud/storage@7.16.0(encoding@0.1.13))(@huggingface/inference@2.6.4)(@ibm-cloud/watsonx-ai@1.1.2)(@langchain/anthropic@0.3.14(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13))(@langchain/aws@0.1.4(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/cohere@0.0.7(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/google-genai@0.2.3(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-val
idate@6.0.4))(zod@3.22.4)))(zod@3.22.4))(@langchain/google-vertexai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(zod@3.22.4))(@langchain/groq@0.1.2(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))(@langchain/mistralai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/ollama@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@mendable/firecrawl-js@0.0.28)(@notionhq/client@2.2.14(encoding@0.1.13))(@opensearch-project/opensearch@1.2.0)(@pinecone-database/pinecone@4.0.0)(@qdrant/js-client-rest@1.9.0(typescript@5.5.2))(@smithy/eventstream-codec@4.0.1)(@smithy/protocol-http@5.0.1)(@smithy/signature-v4@5.0.1)(@smithy/util-utf8@4.0.0)(@supabase/supabase-js@2.39.8(bufferutil@4.0.8)(utf-8-validate@6.0.4))(@upstash/redis@1.22.1(encoding@0.1.13))(@upstash/vector@1.1.5)(@zilliz/milvus2-sdk-node@2.3.5)(apify-client@2.9.3)(assemblyai@4.3.2(bufferutil@4.0.8)(utf-8-validate@6.0.4))(axios@1.7.9)(cheerio@1.0.0-rc.12)(chromadb@1.10.0(@google/generative-ai@0.24.0)(cohere-ai@7.10.0(encoding@0.1.13))(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(cohere-ai@7.10.0(encoding@0.1.13))(crypto-js@4.2.0)(d3-dsv@2.0.0)(encoding@0.1.13)(epub2@3.0.2(ts-toolbelt@9.6.0))(fast-xml-parser@4.4.1)(google-auth-library@9.6.3(encoding@0.1.13))(html-to-text@9.0.5)(ibm-cloud-sdk-core@5.1.0)(ignore@5.3.1)(ioredis@5.3.2)(jsdom@22.1.0(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(utf-8-validate@6.0.4))(jsonwebtoken@9.0.2)(lodash@4.17.21)(lunary@0.7.12(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(react@18.2.0))(mammoth@1.7.0)(mem0ai@2.1.12(@anthropic-ai/sdk@0.37.0(encoding@0.1.13))(@qdrant/js-client-rest@1.9.0(typescript@5.5.2))(@supabase/supabase-js@2.39.8(bufferutil@4.0.8)(utf-8-validate@6.0.4))(@types/jest@29.5.12)(@types/pg@8.11.2)(@types/sqlite3@3.1.11)(encoding@0.1.13)(groq-sdk@0.5.0(encoding@0.1.13))(neo4j-driver@5.27.0)(ollama@0.5.11)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(neo4j-driver@5.27.0)(notion-to-md@3.1.1(encoding@0.1.13))(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(pdf-parse@1.1.1)(pg@8.11.3)(playwright@1.42.1)(portkey-ai@0.1.16)(puppeteer@20.9.0(bufferutil@4.0.8)(encoding@0.1.13)(typescript@5.5.2)(utf-8-validate@6.0.4))(pyodide@0.25.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(redis@4.6.13)(replicate@0.31.1)(srt-parser-2@1.2.3)(typeorm@0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2)))(weaviate-ts-client@1.6.0(encoding@0.1.13)(graphql@16.8.1))(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)) + version: 
0.3.40(@aws-crypto/sha256-js@5.2.0)(@aws-sdk/client-bedrock-agent-runtime@3.755.0)(@aws-sdk/client-bedrock-runtime@3.422.0)(@aws-sdk/client-dynamodb@3.529.1)(@aws-sdk/client-kendra@3.750.0)(@aws-sdk/client-s3@3.529.1)(@aws-sdk/credential-provider-node@3.529.1)(@browserbasehq/sdk@2.0.0(encoding@0.1.13))(@browserbasehq/stagehand@1.9.0(@playwright/test@1.49.1)(bufferutil@4.0.8)(deepmerge@4.3.1)(dotenv@16.4.5)(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(utf-8-validate@6.0.4)(zod@3.22.4))(@datastax/astra-db-ts@1.5.0)(@elastic/elasticsearch@8.12.2)(@getzep/zep-cloud@1.0.7(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(langchain@0.3.6(@langchain/anthropic@0.3.14(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13))(@langchain/aws@0.1.4(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/cohere@0.0.7(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/google-genai@0.2.3(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(zod@3.22.4))(@langchain/google-vertexai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(zod@3.22.4))(@langchain/groq@0.1.2(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))(@langchain/mistralai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/ollama@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(axios@1.7.9)(cheerio@1.0.0-rc.12)(encoding@0.1.13)(handlebars@4.7.8)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(typeorm@0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)))(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))))(@getzep/zep-js@0.9.0)(@gomomento/sdk-core@1.68.1)(@gomomento/sdk@1.68.1(encoding@0.1.13))(@google-ai/generativelanguage@2.6.0(encoding@0.1.13))(@google-cloud/storage@7.16.0(encoding@0.1.13))(@huggingface/inference@2.6.4)(@ibm-cloud/watsonx-ai@1.2.0)(@langchain/anthropic@0.3.14(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13))(@langchain/aws@0.1.4(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/cohere@0.0.7(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/google-genai@0.2.3(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil
@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(zod@3.22.4))(@langchain/google-vertexai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(zod@3.22.4))(@langchain/groq@0.1.2(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))(@langchain/mistralai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/ollama@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@mendable/firecrawl-js@1.25.1)(@notionhq/client@2.2.14(encoding@0.1.13))(@opensearch-project/opensearch@1.2.0)(@pinecone-database/pinecone@4.0.0)(@qdrant/js-client-rest@1.9.0(typescript@5.5.2))(@smithy/eventstream-codec@4.0.1)(@smithy/protocol-http@5.0.1)(@smithy/signature-v4@5.0.1)(@smithy/util-utf8@4.0.0)(@supabase/supabase-js@2.39.8(bufferutil@4.0.8)(utf-8-validate@6.0.4))(@upstash/redis@1.22.1(encoding@0.1.13))(@upstash/vector@1.1.5)(@zilliz/milvus2-sdk-node@2.3.5)(apify-client@2.9.3)(assemblyai@4.3.2(bufferutil@4.0.8)(utf-8-validate@6.0.4))(axios@1.7.9)(cheerio@1.0.0-rc.12)(chromadb@1.10.3(@google/generative-ai@0.24.0)(cohere-ai@7.10.0(encoding@0.1.13))(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(cohere-ai@7.10.0(encoding@0.1.13))(crypto-js@4.2.0)(d3-dsv@2.0.0)(encoding@0.1.13)(epub2@3.0.2(ts-toolbelt@9.6.0))(fast-xml-parser@4.4.1)(google-auth-library@9.6.3(encoding@0.1.13))(handlebars@4.7.8)(html-to-text@9.0.5)(ibm-cloud-sdk-core@5.1.0)(ignore@5.3.1)(ioredis@5.3.2)(jsdom@22.1.0(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(utf-8-validate@6.0.4))(jsonwebtoken@9.0.2)(lodash@4.17.21)(lunary@0.7.12(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(react@18.2.0))(mammoth@1.7.0)(mem0ai@2.1.16(@anthropic-ai/sdk@0.37.0(encoding@0.1.13))(@google/genai@0.7.0(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.4))(@mistralai/mistralai@0.1.3(encoding@0.1.13))(@qdrant/js-client-rest@1.9.0(typescript@5.5.2))(@supabase/supabase-js@2.39.8(bufferutil@4.0.8)(utf-8-validate@6.0.4))(@types/jest@29.5.14)(@types/pg@8.11.2)(@types/sqlite3@3.1.11)(encoding@0.1.13)(groq-sdk@0.5.0(encoding@0.1.13))(neo4j-driver@5.27.0)(ollama@0.5.11)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(neo4j-driver@5.27.0)(notion-to-md@3.1.1(encoding@0.1.13))(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(pdf-parse@1.1.1)(pg@8.11.3)(playwright@1.42.1)(portkey-ai@0.1.16)(puppeteer@20.9.0(bufferutil@4.0.8)(encoding@0.1.13)(typescript@5.5.2)(utf-8-validate@6.0.4))(pyodide@0.25.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(redis@4.6.13)(replicate@0.31.1)(srt-parser-2@1.2.3)(typeorm@0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)))(weaviate-ts-client@1.6.0(encoding@0.1.13)(graphql@16.8.1))(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)) '@langchain/core': specifier: 0.3.37 version: 
0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)) @@ -233,16 +233,16 @@ importers: version: 0.0.1(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)) '@mem0/community': specifier: ^0.0.1 - version: 0.0.1(@anthropic-ai/sdk@0.37.0(encoding@0.1.13))(@aws-crypto/sha256-js@5.2.0)(@aws-sdk/client-bedrock-agent-runtime@3.755.0)(@aws-sdk/client-bedrock-runtime@3.422.0)(@aws-sdk/client-dynamodb@3.529.1)(@aws-sdk/client-kendra@3.750.0)(@aws-sdk/client-s3@3.529.1)(@aws-sdk/credential-provider-node@3.529.1)(@browserbasehq/sdk@2.0.0(encoding@0.1.13))(@browserbasehq/stagehand@1.9.0(@playwright/test@1.49.1)(bufferutil@4.0.8)(deepmerge@4.3.1)(dotenv@16.4.5)(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(utf-8-validate@6.0.4)(zod@3.22.4))(@datastax/astra-db-ts@1.5.0)(@elastic/elasticsearch@8.12.2)(@getzep/zep-cloud@1.0.7(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(langchain@0.3.5(@langchain/anthropic@0.3.14(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13))(@langchain/aws@0.1.4(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/cohere@0.0.7(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/google-genai@0.2.3(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(zod@3.22.4))(@langchain/google-vertexai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(zod@3.22.4))(@langchain/groq@0.1.2(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))(@langchain/mistralai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/ollama@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(axios@1.7.9)(cheerio@1.0.0-rc.12)(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(typeorm@0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2)))(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))))(@getzep/zep-js@0.9.0)(@gomomento/sdk-core@1.68.1)(@gomomento/sdk@1.68.1(encoding@0.1.13))(@google-ai/generativelanguage@2.6.0(encoding@0.1.13))(@google-cloud/storage@7.16.0(encoding@0.1.13))(@huggingface/inference@2.6.4)(@ibm-cloud/watsonx-ai@1.1.2)(@langchain/anthropic@0.3.14(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13))(@langchain/aws@0.1.4(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(buffer
util@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/cohere@0.0.7(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/google-genai@0.2.3(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(zod@3.22.4))(@langchain/google-vertexai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(zod@3.22.4))(@langchain/groq@0.1.2(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))(@langchain/mistralai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/ollama@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@mendable/firecrawl-js@0.0.28)(@notionhq/client@2.2.14(encoding@0.1.13))(@opensearch-project/opensearch@1.2.0)(@pinecone-database/pinecone@4.0.0)(@qdrant/js-client-rest@1.9.0(typescript@5.5.2))(@smithy/eventstream-codec@4.0.1)(@smithy/protocol-http@5.0.1)(@smithy/signature-v4@5.0.1)(@smithy/util-utf8@4.0.0)(@supabase/supabase-js@2.39.8(bufferutil@4.0.8)(utf-8-validate@6.0.4))(@types/jest@29.5.12)(@types/pg@8.11.2)(@types/sqlite3@3.1.11)(@upstash/redis@1.22.1(encoding@0.1.13))(@upstash/vector@1.1.5)(@zilliz/milvus2-sdk-node@2.3.5)(apify-client@2.9.3)(assemblyai@4.3.2(bufferutil@4.0.8)(utf-8-validate@6.0.4))(cheerio@1.0.0-rc.12)(chromadb@1.10.0(@google/generative-ai@0.24.0)(cohere-ai@7.10.0(encoding@0.1.13))(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(cohere-ai@7.10.0(encoding@0.1.13))(crypto-js@4.2.0)(d3-dsv@2.0.0)(encoding@0.1.13)(epub2@3.0.2(ts-toolbelt@9.6.0))(fast-xml-parser@4.4.1)(google-auth-library@9.6.3(encoding@0.1.13))(groq-sdk@0.5.0(encoding@0.1.13))(html-to-text@9.0.5)(ibm-cloud-sdk-core@5.1.0)(ignore@5.3.1)(ioredis@5.3.2)(jsdom@22.1.0(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(utf-8-validate@6.0.4))(jsonwebtoken@9.0.2)(lodash@4.17.21)(lunary@0.7.12(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(react@18.2.0))(mammoth@1.7.0)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(neo4j-driver@5.27.0)(notion-to-md@3.1.1(encoding@0.1.13))(ollama@0.5.11)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(pdf-parse@1.1.1)(pg@8.11.3)(playwright@1.42.1)(portkey-ai@0.1.16)(puppeteer@20.9.0(bufferutil@4.0.8)(encoding@0.1.13)(typescript@5.5.2)(utf-8-validate@6.0.4))(pyodide@0.25.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(redis@4.6.13)(replicate@0.31.1)(sqlite3@5.1.7)(srt-parser-2@1.2.3)(typeorm@0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2)))(weaviate-ts-client@1.6.0(encoding@0.1.13)(graphql@16.8.1))(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)) + version: 
0.0.1(@anthropic-ai/sdk@0.37.0(encoding@0.1.13))(@aws-crypto/sha256-js@5.2.0)(@aws-sdk/client-bedrock-agent-runtime@3.755.0)(@aws-sdk/client-bedrock-runtime@3.422.0)(@aws-sdk/client-dynamodb@3.529.1)(@aws-sdk/client-kendra@3.750.0)(@aws-sdk/client-s3@3.529.1)(@aws-sdk/credential-provider-node@3.529.1)(@browserbasehq/sdk@2.0.0(encoding@0.1.13))(@browserbasehq/stagehand@1.9.0(@playwright/test@1.49.1)(bufferutil@4.0.8)(deepmerge@4.3.1)(dotenv@16.4.5)(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(utf-8-validate@6.0.4)(zod@3.22.4))(@datastax/astra-db-ts@1.5.0)(@elastic/elasticsearch@8.12.2)(@getzep/zep-cloud@1.0.7(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(langchain@0.3.6(@langchain/anthropic@0.3.14(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13))(@langchain/aws@0.1.4(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/cohere@0.0.7(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/google-genai@0.2.3(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(zod@3.22.4))(@langchain/google-vertexai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(zod@3.22.4))(@langchain/groq@0.1.2(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))(@langchain/mistralai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/ollama@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(axios@1.7.9)(cheerio@1.0.0-rc.12)(encoding@0.1.13)(handlebars@4.7.8)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(typeorm@0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)))(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))))(@getzep/zep-js@0.9.0)(@gomomento/sdk-core@1.68.1)(@gomomento/sdk@1.68.1(encoding@0.1.13))(@google-ai/generativelanguage@2.6.0(encoding@0.1.13))(@google-cloud/storage@7.16.0(encoding@0.1.13))(@google/genai@0.7.0(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.4))(@huggingface/inference@2.6.4)(@ibm-cloud/watsonx-ai@1.2.0)(@langchain/anthropic@0.3.14(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13))(@langchain/aws@0.1.4(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/cohere@0.0.7(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/google-genai@0.2.3(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferuti
l@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(zod@3.22.4))(@langchain/google-vertexai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(zod@3.22.4))(@langchain/groq@0.1.2(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))(@langchain/mistralai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/ollama@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@mendable/firecrawl-js@1.25.1)(@mistralai/mistralai@0.1.3(encoding@0.1.13))(@notionhq/client@2.2.14(encoding@0.1.13))(@opensearch-project/opensearch@1.2.0)(@pinecone-database/pinecone@4.0.0)(@qdrant/js-client-rest@1.9.0(typescript@5.5.2))(@smithy/eventstream-codec@4.0.1)(@smithy/protocol-http@5.0.1)(@smithy/signature-v4@5.0.1)(@smithy/util-utf8@4.0.0)(@supabase/supabase-js@2.39.8(bufferutil@4.0.8)(utf-8-validate@6.0.4))(@types/jest@29.5.14)(@types/pg@8.11.2)(@types/sqlite3@3.1.11)(@upstash/redis@1.22.1(encoding@0.1.13))(@upstash/vector@1.1.5)(@zilliz/milvus2-sdk-node@2.3.5)(apify-client@2.9.3)(assemblyai@4.3.2(bufferutil@4.0.8)(utf-8-validate@6.0.4))(cheerio@1.0.0-rc.12)(chromadb@1.10.3(@google/generative-ai@0.24.0)(cohere-ai@7.10.0(encoding@0.1.13))(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(cohere-ai@7.10.0(encoding@0.1.13))(crypto-js@4.2.0)(d3-dsv@2.0.0)(encoding@0.1.13)(epub2@3.0.2(ts-toolbelt@9.6.0))(fast-xml-parser@4.4.1)(google-auth-library@9.6.3(encoding@0.1.13))(groq-sdk@0.5.0(encoding@0.1.13))(handlebars@4.7.8)(html-to-text@9.0.5)(ibm-cloud-sdk-core@5.1.0)(ignore@5.3.1)(ioredis@5.3.2)(jsdom@22.1.0(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(utf-8-validate@6.0.4))(jsonwebtoken@9.0.2)(lodash@4.17.21)(lunary@0.7.12(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(react@18.2.0))(mammoth@1.7.0)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(neo4j-driver@5.27.0)(notion-to-md@3.1.1(encoding@0.1.13))(ollama@0.5.11)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(pdf-parse@1.1.1)(pg@8.11.3)(playwright@1.42.1)(portkey-ai@0.1.16)(puppeteer@20.9.0(bufferutil@4.0.8)(encoding@0.1.13)(typescript@5.5.2)(utf-8-validate@6.0.4))(pyodide@0.25.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(redis@4.6.13)(replicate@0.31.1)(sqlite3@5.1.7)(srt-parser-2@1.2.3)(typeorm@0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)))(weaviate-ts-client@1.6.0(encoding@0.1.13)(graphql@16.8.1))(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)) '@mendable/firecrawl-js': - specifier: ^0.0.28 - version: 0.0.28 + specifier: ^1.18.2 + version: 1.25.1 '@mistralai/mistralai': specifier: 0.1.3 version: 0.1.3(encoding@0.1.13) '@modelcontextprotocol/sdk': specifier: ^1.10.1 - version: 1.10.1 + version: 1.10.2 '@modelcontextprotocol/server-brave-search': specifier: ^0.6.2 version: 0.6.2 @@ -272,7 +272,7 @@ importers: version: 1.9.0(typescript@5.5.2) '@stripe/agent-toolkit': specifier: ^0.1.20 - version: 
0.1.20(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(ai@3.2.22(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(react@18.2.0)(solid-js@1.7.1)(svelte@4.2.18)(vue@3.4.31(typescript@5.5.2))(zod@3.22.4)) + version: 0.1.21(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(ai@3.2.22(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(react@18.2.0)(solid-js@1.7.1)(svelte@4.2.18)(vue@3.4.31(typescript@5.5.2))(zod@3.22.4)) '@supabase/supabase-js': specifier: ^2.29.0 version: 2.39.8(bufferutil@4.0.8)(utf-8-validate@6.0.4) @@ -305,7 +305,7 @@ importers: version: 1.0.0-rc.12 chromadb: specifier: ^1.10.0 - version: 1.10.0(@google/generative-ai@0.24.0)(cohere-ai@7.10.0(encoding@0.1.13))(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)) + version: 1.10.3(@google/generative-ai@0.24.0)(cohere-ai@7.10.0(encoding@0.1.13))(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)) cohere-ai: specifier: ^7.7.5 version: 7.10.0(encoding@0.1.13) @@ -365,16 +365,16 @@ importers: version: 5.0.1 jsonrepair: specifier: ^3.11.1 - version: 3.11.2 + version: 3.12.0 langchain: specifier: ^0.3.5 - version: 0.3.5(@langchain/anthropic@0.3.14(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13))(@langchain/aws@0.1.4(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/cohere@0.0.7(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/google-genai@0.2.3(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(zod@3.22.4))(@langchain/google-vertexai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(zod@3.22.4))(@langchain/groq@0.1.2(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))(@langchain/mistralai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/ollama@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(axios@1.7.9)(cheerio@1.0.0-rc.12)(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(typeorm@0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2)))(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)) + version: 
0.3.6(@langchain/anthropic@0.3.14(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13))(@langchain/aws@0.1.4(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/cohere@0.0.7(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/google-genai@0.2.3(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(zod@3.22.4))(@langchain/google-vertexai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(zod@3.22.4))(@langchain/groq@0.1.2(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))(@langchain/mistralai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/ollama@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(axios@1.7.9)(cheerio@1.0.0-rc.12)(encoding@0.1.13)(handlebars@4.7.8)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(typeorm@0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)))(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)) langfuse: specifier: 3.3.4 version: 3.3.4 langfuse-langchain: specifier: ^3.3.4 - version: 
3.3.4(langchain@0.3.5(@langchain/anthropic@0.3.14(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13))(@langchain/aws@0.1.4(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/cohere@0.0.7(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/google-genai@0.2.3(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(zod@3.22.4))(@langchain/google-vertexai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(zod@3.22.4))(@langchain/groq@0.1.2(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))(@langchain/mistralai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/ollama@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(axios@1.7.9)(cheerio@1.0.0-rc.12)(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(typeorm@0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2)))(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))) + version: 
3.3.4(langchain@0.3.6(@langchain/anthropic@0.3.14(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13))(@langchain/aws@0.1.4(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/cohere@0.0.7(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/google-genai@0.2.3(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(zod@3.22.4))(@langchain/google-vertexai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(zod@3.22.4))(@langchain/groq@0.1.2(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))(@langchain/mistralai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/ollama@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(axios@1.7.9)(cheerio@1.0.0-rc.12)(encoding@0.1.13)(handlebars@4.7.8)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(typeorm@0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)))(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))) langsmith: specifier: 0.1.6 version: 0.1.6 @@ -462,9 +462,12 @@ importers: srt-parser-2: specifier: ^1.2.3 version: 1.2.3 + supergateway: + specifier: 3.0.1 + version: 3.0.1(bufferutil@4.0.8)(utf-8-validate@6.0.4) typeorm: specifier: ^0.3.6 - version: 0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2)) + version: 0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)) weaviate-ts-client: specifier: ^1.1.0 version: 1.6.0(encoding@0.1.13)(graphql@16.8.1) @@ -531,10 +534,13 @@ importers: dependencies: '@aws-sdk/client-secrets-manager': specifier: ^3.699.0 - version: 3.723.0 + version: 3.726.1 '@google-cloud/logging-winston': specifier: ^6.0.0 version: 6.0.0(encoding@0.1.13)(winston@3.12.0) + '@keyv/redis': + specifier: ^4.2.0 + version: 4.3.3 '@oclif/core': specifier: 4.0.7 version: 4.0.7 @@ -543,7 +549,7 @@ importers: version: 1.9.0 '@opentelemetry/auto-instrumentations-node': specifier: ^0.52.0 - version: 0.52.0(@opentelemetry/api@1.9.0)(encoding@0.1.13) + version: 0.52.1(@opentelemetry/api@1.9.0)(encoding@0.1.13) '@opentelemetry/core': specifier: 1.27.0 version: 1.27.0(@opentelemetry/api@1.9.0) @@ -573,16 +579,28 @@ importers: version: 1.27.0(@opentelemetry/api@1.9.0) '@opentelemetry/sdk-node': specifier: ^0.54.0 - version: 0.54.0(@opentelemetry/api@1.9.0) + version: 0.54.2(@opentelemetry/api@1.9.0) '@opentelemetry/sdk-trace-base': specifier: 
1.27.0 version: 1.27.0(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': specifier: 1.27.0 version: 1.27.0 + '@types/bcryptjs': + specifier: ^2.4.6 + version: 2.4.6 '@types/lodash': specifier: ^4.14.202 version: 4.14.202 + '@types/passport': + specifier: ^1.0.16 + version: 1.0.16 + '@types/passport-jwt': + specifier: ^4.0.1 + version: 4.0.1 + '@types/passport-local': + specifier: ^1.0.38 + version: 1.0.38 '@types/uuid': specifier: ^9.0.7 version: 9.0.8 @@ -592,21 +610,42 @@ importers: axios: specifier: 1.7.9 version: 1.7.9(debug@4.3.4) + bcryptjs: + specifier: ^2.4.3 + version: 2.4.3 bull-board: specifier: ^2.1.3 version: 2.1.3 bullmq: - specifier: ^5.42.0 - version: 5.43.0 + specifier: 5.45.2 + version: 5.45.2 + cache-manager: + specifier: ^6.3.2 + version: 6.4.2 + connect-pg-simple: + specifier: ^10.0.0 + version: 10.0.0 + connect-redis: + specifier: ^8.0.1 + version: 8.0.2(express-session@1.18.1) + connect-sqlite3: + specifier: ^0.9.15 + version: 0.9.15 content-disposition: specifier: 0.5.4 version: 0.5.4 + cookie-parser: + specifier: ^1.4.6 + version: 1.4.6 cors: specifier: ^2.8.5 version: 2.8.5 crypto-js: specifier: ^4.1.1 version: 4.2.0 + csv-parser: + specifier: ^3.0.0 + version: 3.0.0 dotenv: specifier: ^16.0.0 version: 16.4.5 @@ -616,9 +655,15 @@ importers: express-basic-auth: specifier: ^1.2.1 version: 1.2.1 + express-mysql-session: + specifier: ^3.0.3 + version: 3.0.3 express-rate-limit: specifier: ^6.9.0 version: 6.11.2(express@4.18.3) + express-session: + specifier: ^1.18.1 + version: 1.18.1 flowise-components: specifier: workspace:^ version: link:../components @@ -631,12 +676,24 @@ importers: global-agent: specifier: ^3.0.0 version: 3.0.0 + gulp: + specifier: ^4.0.2 + version: 4.0.2 + handlebars: + specifier: ^4.7.8 + version: 4.7.8 http-errors: specifier: ^2.0.0 version: 2.0.0 http-status-codes: specifier: ^2.3.0 version: 2.3.0 + jsonwebtoken: + specifier: ^9.0.2 + version: 9.0.2 + jwt-decode: + specifier: ^4.0.0 + version: 4.0.0 langchainhub: specifier: ^0.0.11 version: 0.0.11 @@ -661,9 +718,39 @@ importers: mysql2: specifier: ^3.11.3 version: 3.11.4 + nanoid: + specifier: '3' + version: 3.3.7 + nodemailer: + specifier: ^6.9.14 + version: 6.9.15 openai: specifier: 4.96.0 version: 4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4) + passport: + specifier: ^0.7.0 + version: 0.7.0 + passport-auth0: + specifier: ^1.4.4 + version: 1.4.4 + passport-cookie: + specifier: ^1.0.9 + version: 1.0.9 + passport-github: + specifier: ^1.1.0 + version: 1.1.0 + passport-google-oauth20: + specifier: ^2.0.0 + version: 2.0.0 + passport-jwt: + specifier: ^4.0.1 + version: 4.0.1 + passport-local: + specifier: ^1.0.0 + version: 1.0.0 + passport-openidconnect: + specifier: ^0.1.2 + version: 0.1.2 pg: specifier: ^8.11.1 version: 8.11.3 @@ -688,37 +775,70 @@ importers: sqlite3: specifier: ^5.1.6 version: 5.1.7 + stripe: + specifier: ^15.6.0 + version: 15.12.0 turndown: specifier: ^7.2.0 version: 7.2.0 typeorm: specifier: ^0.3.6 - version: 0.3.20(ioredis@5.4.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2)) + version: 0.3.20(ioredis@5.4.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.7.0)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)) uuid: specifier: ^9.0.1 version: 9.0.1 winston: specifier: ^3.9.0 version: 3.12.0 + 
winston-daily-rotate-file: + specifier: ^5.0.0 + version: 5.0.0(winston@3.12.0) devDependencies: '@types/content-disposition': specifier: 0.5.8 version: 0.5.8 + '@types/cookie-parser': + specifier: ^1.4.7 + version: 1.4.7 '@types/cors': specifier: ^2.8.12 version: 2.8.17 '@types/crypto-js': specifier: ^4.1.1 version: 4.2.2 + '@types/express-session': + specifier: ^1.18.0 + version: 1.18.0 + '@types/jest': + specifier: ^29.5.14 + version: 29.5.14 + '@types/jsonwebtoken': + specifier: ^9.0.6 + version: 9.0.6 '@types/multer': specifier: ^1.4.7 version: 1.4.11 '@types/multer-s3': specifier: ^3.0.3 version: 3.0.3 + '@types/nodemailer': + specifier: ^6.4.15 + version: 6.4.15 + '@types/passport-auth0': + specifier: ^1.0.9 + version: 1.0.9 + '@types/passport-github': + specifier: ^1.1.12 + version: 1.1.12 + '@types/passport-openidconnect': + specifier: ^0.1.3 + version: 0.1.3 '@types/sanitize-html': specifier: ^2.9.5 version: 2.11.0 + '@types/supertest': + specifier: ^6.0.3 + version: 6.0.3 '@types/turndown': specifier: ^5.0.5 version: 5.0.5 @@ -728,12 +848,15 @@ importers: cypress: specifier: ^13.13.0 version: 13.13.0 + jest: + specifier: ^29.7.0 + version: 29.7.0(@types/node@22.5.4)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)) nodemon: specifier: ^2.0.22 version: 2.0.22 oclif: specifier: ^3 - version: 3.17.2(@swc/core@1.4.6)(@types/node@22.13.9)(encoding@0.1.13)(mem-fs@2.3.0)(typescript@5.5.2) + version: 3.17.2(@swc/core@1.4.6)(@types/node@22.5.4)(encoding@0.1.13)(mem-fs@2.3.0)(typescript@5.5.2) rimraf: specifier: ^5.0.5 version: 5.0.5 @@ -746,9 +869,15 @@ importers: start-server-and-test: specifier: ^2.0.3 version: 2.0.3 + supertest: + specifier: ^7.1.0 + version: 7.1.0 + ts-jest: + specifier: ^29.3.2 + version: 29.3.2(@babel/core@7.24.0)(@jest/transform@29.7.0)(@jest/types@29.6.3)(babel-jest@29.7.0(@babel/core@7.24.0))(jest@29.7.0(@types/node@22.5.4)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)))(typescript@5.5.2) ts-node: specifier: ^10.7.0 - version: 10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2) + version: 10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2) tsc-watch: specifier: ^6.0.4 version: 6.0.4(typescript@5.5.2) @@ -764,9 +893,12 @@ importers: '@codemirror/lang-json': specifier: ^6.0.1 version: 6.0.1 + '@codemirror/lang-markdown': + specifier: ^6.2.5 + version: 6.2.5 '@codemirror/view': - specifier: ^6.22.3 - version: 6.25.1 + specifier: ^6.26.3 + version: 6.26.3 '@emotion/cache': specifier: ^11.4.0 version: 11.11.0 @@ -803,6 +935,9 @@ importers: '@mui/x-tree-view': specifier: ^7.25.0 version: 7.29.1(@emotion/react@11.11.4(@types/react@18.2.65)(react@18.2.0))(@emotion/styled@11.11.0(@emotion/react@11.11.4(@types/react@18.2.65)(react@18.2.0))(@types/react@18.2.65)(react@18.2.0))(@mui/material@5.15.0(@emotion/react@11.11.4(@types/react@18.2.65)(react@18.2.0))(@emotion/styled@11.11.0(@emotion/react@11.11.4(@types/react@18.2.65)(react@18.2.0))(@types/react@18.2.65)(react@18.2.0))(@types/react@18.2.65)(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(@mui/system@6.4.7(@emotion/react@11.11.4(@types/react@18.2.65)(react@18.2.0))(@emotion/styled@11.11.0(@emotion/react@11.11.4(@types/react@18.2.65)(react@18.2.0))(@types/react@18.2.65)(react@18.2.0))(@types/react@18.2.65)(react@18.2.0))(@types/react@18.2.65)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@reduxjs/toolkit': + specifier: ^2.2.7 + version: 
2.2.7(react-redux@8.1.3(@types/react-dom@18.2.21)(@types/react@18.2.65)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)(redux@4.2.1))(react@18.2.0) '@tabler/icons-react': specifier: ^3.30.0 version: 3.31.0(react@18.2.0) @@ -823,13 +958,13 @@ importers: version: 2.12.0 '@uiw/codemirror-theme-sublime': specifier: ^4.21.21 - version: 4.21.24(@codemirror/language@6.10.1)(@codemirror/state@6.4.1)(@codemirror/view@6.25.1) + version: 4.21.24(@codemirror/language@6.10.1)(@codemirror/state@6.4.1)(@codemirror/view@6.26.3) '@uiw/codemirror-theme-vscode': specifier: ^4.21.21 - version: 4.21.24(@codemirror/language@6.10.1)(@codemirror/state@6.4.1)(@codemirror/view@6.25.1) + version: 4.21.24(@codemirror/language@6.10.1)(@codemirror/state@6.4.1)(@codemirror/view@6.26.3) '@uiw/react-codemirror': specifier: ^4.21.21 - version: 4.21.24(@babel/runtime@7.26.9)(@codemirror/autocomplete@6.14.0(@codemirror/language@6.10.1)(@codemirror/state@6.4.1)(@codemirror/view@6.25.1)(@lezer/common@1.2.1))(@codemirror/language@6.10.1)(@codemirror/lint@6.5.0)(@codemirror/search@6.5.6)(@codemirror/state@6.4.1)(@codemirror/theme-one-dark@6.1.2)(@codemirror/view@6.25.1)(codemirror@6.0.1(@lezer/common@1.2.1))(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + version: 4.21.24(@babel/runtime@7.26.10)(@codemirror/autocomplete@6.14.0(@codemirror/language@6.10.1)(@codemirror/state@6.4.1)(@codemirror/view@6.26.3)(@lezer/common@1.2.1))(@codemirror/language@6.10.1)(@codemirror/lint@6.5.0)(@codemirror/search@6.5.6)(@codemirror/state@6.4.1)(@codemirror/theme-one-dark@6.1.2)(@codemirror/view@6.26.3)(codemirror@6.0.1(@lezer/common@1.2.1))(react-dom@18.2.0(react@18.2.0))(react@18.2.0) axios: specifier: 1.7.9 version: 1.7.9(debug@4.3.4) @@ -844,7 +979,7 @@ importers: version: 3.0.3 flowise-embed-react: specifier: latest - version: 3.0.3(@types/node@22.13.9)(flowise-embed@3.0.3)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)(sass@1.71.1)(terser@5.29.1)(typescript@5.5.2) + version: 3.0.3(@types/node@22.5.4)(flowise-embed@3.0.3)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)(sass@1.71.1)(terser@5.29.1)(typescript@5.5.2) flowise-react-json-view: specifier: '*' version: 1.21.7(@types/react@18.2.65)(encoding@0.1.13)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) @@ -913,7 +1048,10 @@ importers: version: 15.5.0(react@18.2.0) reactflow: specifier: ^11.5.6 - version: 11.10.4(@types/react@18.2.65)(immer@9.0.21)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + version: 11.10.4(@types/react@18.2.65)(immer@10.1.1)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + recharts: + specifier: ^2.12.6 + version: 2.12.7(react-dom@18.2.0(react@18.2.0))(react@18.2.0) redux: specifier: ^4.0.5 version: 4.2.1 @@ -956,13 +1094,13 @@ importers: version: 12.8.3(@testing-library/dom@9.3.4) '@vitejs/plugin-react': specifier: ^4.2.0 - version: 4.2.1(vite@5.1.6(@types/node@22.13.9)(sass@1.71.1)(terser@5.29.1)) + version: 4.2.1(vite@5.1.6(@types/node@22.5.4)(sass@1.71.1)(terser@5.29.1)) pretty-quick: specifier: ^3.1.3 version: 3.3.1(prettier@3.2.5) react-scripts: specifier: ^5.0.1 - version: 5.0.1(@babel/plugin-syntax-flow@7.23.3(@babel/core@7.24.0))(@babel/plugin-transform-react-jsx@7.25.9(@babel/core@7.24.0))(@swc/core@1.4.6)(@types/babel__core@7.20.5)(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(eslint@8.57.0)(react@18.2.0)(sass@1.71.1)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2))(type-fest@4.12.0)(typescript@5.5.2)(utf-8-validate@6.0.4) + version: 
5.0.1(@babel/plugin-syntax-flow@7.23.3(@babel/core@7.24.0))(@babel/plugin-transform-react-jsx@7.25.9(@babel/core@7.24.0))(@swc/core@1.4.6)(@types/babel__core@7.20.5)(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(eslint@8.57.0)(react@18.2.0)(sass@1.71.1)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2))(type-fest@4.40.1)(typescript@5.5.2)(utf-8-validate@6.0.4) rimraf: specifier: ^5.0.5 version: 5.0.5 @@ -974,10 +1112,10 @@ importers: version: 5.5.2 vite: specifier: ^5.0.2 - version: 5.1.6(@types/node@22.13.9)(sass@1.71.1)(terser@5.29.1) + version: 5.1.6(@types/node@22.5.4)(sass@1.71.1)(terser@5.29.1) vite-plugin-pwa: specifier: ^0.17.0 - version: 0.17.5(vite@5.1.6(@types/node@22.13.9)(sass@1.71.1)(terser@5.29.1))(workbox-build@7.0.0(@types/babel__core@7.20.5))(workbox-window@7.0.0) + version: 0.17.5(vite@5.1.6(@types/node@22.5.4)(sass@1.71.1)(terser@5.29.1))(workbox-build@7.0.0(@types/babel__core@7.20.5))(workbox-window@7.0.0) vite-plugin-react-js-support: specifier: ^1.0.7 version: 1.0.7 @@ -1188,8 +1326,8 @@ packages: resolution: { integrity: sha512-ZpvyO4w3XWo/OjXLd3fm7CLcKUUYcyady9qzTnKKSnp8a2NqO7UvU/1zhYdm+yyy8TR/9t7sDy+q6AYd4Nsr8g== } engines: { node: '>=14.0.0' } - '@aws-sdk/client-secrets-manager@3.723.0': - resolution: { integrity: sha512-Zh+j0J9iog4c9l8re9EXvS3/+ylVwbJIFbqDJUvqszdCrTAFQGaGVdxoJND5WC5Ggr6Syiy7ZDj0p+yrEQOObA== } + '@aws-sdk/client-secrets-manager@3.726.1': + resolution: { integrity: sha512-eO9WpE8IyQrs2xWhfQCdHcVTHQTwJ56JGx3FhwhtFWWYHIS0c1bTIAvP5E3jSWAZNaK1iWdVexz3yGi3aAnGzA== } engines: { node: '>=18.0.0' } '@aws-sdk/client-sso-oidc@3.529.1': @@ -1198,11 +1336,11 @@ packages: peerDependencies: '@aws-sdk/credential-provider-node': ^3.529.1 - '@aws-sdk/client-sso-oidc@3.723.0': - resolution: { integrity: sha512-9IH90m4bnHogBctVna2FnXaIGVORncfdxcqeEIovOxjIJJyHDmEAtA7B91dAM4sruddTbVzOYnqfPVst3odCbA== } + '@aws-sdk/client-sso-oidc@3.726.0': + resolution: { integrity: sha512-5JzTX9jwev7+y2Jkzjz0pd1wobB5JQfPOQF3N2DrJ5Pao0/k6uRYwE4NqB0p0HlGrMTDm7xNq7OSPPIPG575Jw== } engines: { node: '>=18.0.0' } peerDependencies: - '@aws-sdk/client-sts': ^3.723.0 + '@aws-sdk/client-sts': ^3.726.0 '@aws-sdk/client-sso@3.421.0': resolution: { integrity: sha512-40CmW7K2/FZEn3CbOjbpRYeVjKu6aJQlpRHcAgEJGNoVEAnRA3YNH4H0BN2iWWITfYg3B7sIjMm5VE9fCIK1Ng== } @@ -1212,8 +1350,8 @@ packages: resolution: { integrity: sha512-KT1U/ZNjDhVv2ZgjzaeAn9VM7l667yeSguMrRYC8qk5h91/61MbjZypi6eOuKuVM+0fsQvzKScTQz0Lio0eYag== } engines: { node: '>=14.0.0' } - '@aws-sdk/client-sso@3.723.0': - resolution: { integrity: sha512-r1ddZDb8yPmdofX1gQ4m8oqKozgkgVONLlAuSprGObbyMy8bYt1Psxu+GjnwMmgVu3vlF069PHyW1ndrBiL1zA== } + '@aws-sdk/client-sso@3.726.0': + resolution: { integrity: sha512-NM5pjv2qglEc4XN3nnDqtqGsSGv1k5YTmzDo3W3pObItHmpS8grSeNfX9zSH+aVl0Q8hE4ZIgvTPNZ+GzwVlqg== } engines: { node: '>=18.0.0' } '@aws-sdk/client-sso@3.750.0': @@ -1230,8 +1368,8 @@ packages: peerDependencies: '@aws-sdk/credential-provider-node': ^3.529.1 - '@aws-sdk/client-sts@3.723.0': - resolution: { integrity: sha512-YyN8x4MI/jMb4LpHsLf+VYqvbColMK8aZeGWVk2fTFsmt8lpTYGaGC1yybSwGX42mZ4W8ucu8SAYSbUraJZEjA== } + '@aws-sdk/client-sts@3.726.1': + resolution: { integrity: sha512-qh9Q9Vu1hrM/wMBOBIaskwnE4GTFaZu26Q6WHwyWNfj7J8a40vBxpW16c2vYXHLBtwRKM1be8uRLkmDwghpiNw== } engines: { node: '>=18.0.0' } '@aws-sdk/core@3.529.1': @@ -1282,11 +1420,11 @@ packages: resolution: { integrity: sha512-RjHsuTvHIwXG7a/3ERexemiD3c9riKMCZQzY2/b0Gg0ButEVbBcMfERtUzWmQ0V4ufe/PEZjP68MH1gupcoF9A== } engines: { node: '>=14.0.0' } - 
'@aws-sdk/credential-provider-ini@3.723.0': - resolution: { integrity: sha512-fWRLksuSG851e7Iu+ltMrQTM7C/5iI9OkxAmCYblcCetAzjTRmMB2arku0Z83D8edIZEQtOJMt5oQ9KNg43pzg== } + '@aws-sdk/credential-provider-ini@3.726.0': + resolution: { integrity: sha512-seTtcKL2+gZX6yK1QRPr5mDJIBOatrpoyrO8D5b8plYtV/PDbDW3mtDJSWFHet29G61ZmlNElyXRqQCXn9WX+A== } engines: { node: '>=18.0.0' } peerDependencies: - '@aws-sdk/client-sts': ^3.723.0 + '@aws-sdk/client-sts': ^3.726.0 '@aws-sdk/credential-provider-ini@3.750.0': resolution: { integrity: sha512-2YIZmyEr5RUd3uxXpxOLD9G67Bibm4I/65M6vKFP17jVMUT+R1nL7mKqmhEVO2p+BoeV+bwMyJ/jpTYG368PCg== } @@ -1300,8 +1438,8 @@ packages: resolution: { integrity: sha512-mvY7F3dMmk/0dZOCfl5sUI1bG0osureBjxhELGCF0KkJqhWI0hIzh8UnPkYytSg3vdc97CMv7pTcozxrdA3b0g== } engines: { node: '>=14.0.0' } - '@aws-sdk/credential-provider-node@3.723.0': - resolution: { integrity: sha512-OyLHt+aY+rkuRejigcxviS5RLUBcqbxhDTSNfP8dp9I+1SP610qRLpTIROvtKwXZssFcATpPfgikFtVYRrihXQ== } + '@aws-sdk/credential-provider-node@3.726.0': + resolution: { integrity: sha512-jjsewBcw/uLi24x8JbnuDjJad4VA9ROCE94uVRbEnGmUEsds75FWOKp3fWZLQlmjLtzsIbJOZLALkZP86liPaw== } engines: { node: '>=18.0.0' } '@aws-sdk/credential-provider-node@3.750.0': @@ -1332,8 +1470,8 @@ packages: resolution: { integrity: sha512-KFMKkaoTGDgSJG+o9Ii7AglWG5JQeF6IFw9cXLMwDdIrp3KUmRcUIqe0cjOoCqeQEDGy0VHsimHmKKJ3894i/A== } engines: { node: '>=14.0.0' } - '@aws-sdk/credential-provider-sso@3.723.0': - resolution: { integrity: sha512-laCnxrk0pgUegU+ib6rj1/Uv51wei+cH8crvBJddybc8EDn7Qht61tCvBwf3o33qUDC+ZWZZewlpSebf+J+tBw== } + '@aws-sdk/credential-provider-sso@3.726.0': + resolution: { integrity: sha512-WxkN76WeB08j2yw7jUH9yCMPxmT9eBFd9ZA/aACG7yzOIlsz7gvG3P2FQ0tVg25GHM0E4PdU3p/ByTOawzcOAg== } engines: { node: '>=18.0.0' } '@aws-sdk/credential-provider-sso@3.750.0': @@ -1464,8 +1602,8 @@ packages: resolution: { integrity: sha512-4al/6uO+t/QIYXK2OgqzDKQzzLAYJza1vWFS+S0lJ3jLNGyLB5BMU5KqWjDzevYZ4eCnz2Nn7z0FveUTNz8YdQ== } engines: { node: '>=14.0.0' } - '@aws-sdk/middleware-user-agent@3.723.0': - resolution: { integrity: sha512-AY5H2vD3IRElplBO4DCyRMNnOG/4/cb0tsHyLe1HJy0hdUF6eY5z/VVjKJoKbbDk7ui9euyOBWslXxDyLmyPWg== } + '@aws-sdk/middleware-user-agent@3.726.0': + resolution: { integrity: sha512-hZvzuE5S0JmFie1r68K2wQvJbzyxJFdzltj9skgnnwdvLe8F/tz7MqLkm28uV0m4jeHk0LpiBo6eZaPkQiwsZQ== } engines: { node: '>=18.0.0' } '@aws-sdk/middleware-user-agent@3.750.0': @@ -1522,6 +1660,10 @@ packages: resolution: { integrity: sha512-AqGIu4u+SxPiUuNBp2acCVcq80KDUFjxe6e3cMTvKWTzCbrVk1AXv0dAaJnCmdkWIha6zJDWxpIk/aL4EGhZ9A== } engines: { node: '>=14.0.0' } + '@aws-sdk/types@3.609.0': + resolution: { integrity: sha512-+Tqnh9w0h2LcrUsdXyT1F8mNhXz+tVYBtP19LpeEGntmvHwa2XzvLUCWpoIAIVsHp5+HdB2X9Sn0KAtmbFXc2Q== } + engines: { node: '>=16.0.0' } + '@aws-sdk/types@3.723.0': resolution: { integrity: sha512-LmK3kwiMZG1y5g3LGihT9mNkeNOmwEyPk6HGcJqh0wOSV4QpWoKu2epyKE4MLQNUUlz2kOVbVbOrwmI6ZcteuA== } engines: { node: '>=18.0.0' } @@ -1542,8 +1684,8 @@ packages: resolution: { integrity: sha512-DIW7WWU5tIGkeeKX6NJUyrEIdWMiqjLQG3XBzaUj+ufIENwNjdAHhlD8l2vX7Yr3JZRT6yN/84wBCj7Tw1xd1g== } engines: { node: '>=14.0.0' } - '@aws-sdk/util-endpoints@3.723.0': - resolution: { integrity: sha512-vR1ZfAUvrTtdA1Q78QxgR8TFgi2gzk+N4EmNjbyR5hHmeOXuaKRdhbNQAzLPYVe1aNUpoiy9cl8mWkg9SrNHBw== } + '@aws-sdk/util-endpoints@3.726.0': + resolution: { integrity: sha512-sLd30ASsPMoPn3XBK50oe/bkpJ4N8Bpb7SbhoxcY3Lk+fSASaWxbbXE81nbvCnkxrZCvkPOiDHzJCp1E2im71A== } engines: { node: '>=18.0.0' } '@aws-sdk/util-endpoints@3.743.0': @@ 
-1584,8 +1726,8 @@ packages: aws-crt: optional: true - '@aws-sdk/util-user-agent-node@3.723.0': - resolution: { integrity: sha512-uCtW5sGq8jCwA9w57TvVRIwNnPbSDD1lJaTIgotf7Jit2bTrYR64thgMy/drL5yU5aHOdFIQljqn/5aDXLtTJw== } + '@aws-sdk/util-user-agent-node@3.726.0': + resolution: { integrity: sha512-iEj6KX9o6IQf23oziorveRqyzyclWai95oZHDJtYav3fvLJKStwSjygO4xSF7ycHcTYeCHSLO1FFOHgGVs4Viw== } engines: { node: '>=18.0.0' } peerDependencies: aws-crt: '>=1.0.0' @@ -2707,8 +2849,8 @@ packages: resolution: { integrity: sha512-Chk32uHMg6TnQdvw2e9IlqPpFX/6NLuK0Ys2PqLb7/gL5uFn9mXvK715FGLlOLQrcO4qIkNHkvPGktzzXexsFw== } engines: { node: '>=6.9.0' } - '@babel/runtime@7.26.9': - resolution: { integrity: sha512-aA63XwOkcl4xxQa3HjPMqOP6LiK0ZDv3mUPYEFXkpHbaFjtGggE1A61FjFzJnB+p7/oy2gA8E+rcBNl/zC1tMg== } + '@babel/runtime@7.26.10': + resolution: { integrity: sha512-2WJMeRQPHKSPemqk/awGrAiuFfzBmOIPXKizAsVhWH9YJqLZ0H+HS4c8loHGgW6utJ3E/ejXQUsiGaQy2NZ9Fw== } engines: { node: '>=6.9.0' } '@babel/template@7.25.9': @@ -2761,12 +2903,21 @@ packages: '@codemirror/commands@6.5.0': resolution: { integrity: sha512-rK+sj4fCAN/QfcY9BEzYMgp4wwL/q5aj/VfNSoH1RWPF9XS/dUwBkvlL3hpWgEjOqlpdN1uLC9UkjJ4tmyjJYg== } + '@codemirror/lang-css@6.2.1': + resolution: { integrity: sha512-/UNWDNV5Viwi/1lpr/dIXJNWiwDxpw13I4pTUAsNxZdg6E0mI2kTQb0P2iHczg1Tu+H4EBgJR+hYhKiHKko7qg== } + + '@codemirror/lang-html@6.4.9': + resolution: { integrity: sha512-aQv37pIMSlueybId/2PVSP6NPnmurFDVmZwzc7jszd2KAF8qd4VBbvNYPXWQq90WIARjsdVkPbw29pszmHws3Q== } + '@codemirror/lang-javascript@6.2.2': resolution: { integrity: sha512-VGQfY+FCc285AhWuwjYxQyUQcYurWlxdKYT4bqwr3Twnd5wP5WSeu52t4tvvuWmljT4EmgEgZCqSieokhtY8hg== } '@codemirror/lang-json@6.0.1': resolution: { integrity: sha512-+T1flHdgpqDDlJZ2Lkil/rLiRy684WMLc74xUnjJH48GQdfJo/pudlTRreZmKwzP8/tGdKf83wlbAdOCzlJOGQ== } + '@codemirror/lang-markdown@6.2.5': + resolution: { integrity: sha512-Hgke565YcO4fd9pe2uLYxnMufHO5rQwRr+AAhFq8ABuhkrjyX8R5p5s+hZUTdV60O0dMRjxKhBLxz8pu/MkUVA== } + '@codemirror/language@6.10.1': resolution: { integrity: sha512-5GrXzrhq6k+gL5fjkAwt90nYDmjlzTIJV8THnxNFtNKWotMIlzzN+CpqxqwXOECnUdOndmSeWntVrVcv5axWRQ== } @@ -2782,9 +2933,6 @@ packages: '@codemirror/theme-one-dark@6.1.2': resolution: { integrity: sha512-F+sH0X16j/qFLMAfbciKTxVOwkdAS336b7AXTKOZhy8BR3eH/RelsnLgLFINrpST63mmN2OuwUt0W2ndUgYwUA== } - '@codemirror/view@6.25.1': - resolution: { integrity: sha512-2LXLxsQnHDdfGzDvjzAwZh2ZviNJm7im6tGpa0IONIDnFd8RZ80D2SNi8PDi6YjKcMoMRK20v6OmKIdsrwsyoQ== } - '@codemirror/view@6.26.3': resolution: { integrity: sha512-gmqxkPALZjkgSxIeeweY/wGQXBfwTUaLs8h7OKtSwfbj9Ct3L11lD+u1sS7XHppxFQoMDiMDp07P9f3I2jWOHw== } @@ -3438,6 +3586,10 @@ packages: resolution: { integrity: sha512-hfwfdlVpJ+kM6o2b5UFfPnweBcz8tgHAFRswnqUKYqLJsvKU0DDD0Z2/YKoHyAUoPJAv20qg6KlC3msNeUKUiw== } engines: { node: '>=18.0.0' } + '@google/genai@0.7.0': + resolution: { integrity: sha512-r+Fwj/emnXZN5R+4JCxDXboY4AGTmTn7+Wnori5dgyJiStP0P82f9YYL0CVsCnDIumNY2i0UIcZ1zGZdtHJ34w== } + engines: { node: '>=18.0.0' } + '@google/generative-ai@0.24.0': resolution: { integrity: sha512-fnEITCGEB7NdX0BhoYZ/cq/7WPZ1QS5IzJJfC3Tg/OwkvBetMiVJciyaan297OvE4B9Jg1xvo0zIazX/9sGu1Q== } engines: { node: '>=18.0.0' } @@ -3485,6 +3637,7 @@ packages: '@humanwhocodes/config-array@0.11.14': resolution: { integrity: sha512-3T8LkOmg45BV5FICb15QQMsyUSWrQ8AygVfC7ZG32zOalnqrilm018ZVCw0eapXux8FtA33q8PSRSstjee3jSg== } engines: { node: '>=10.10.0' } + deprecated: Use @eslint/config-array instead '@humanwhocodes/module-importer@1.0.1': resolution: { integrity: 
sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA== } @@ -3492,9 +3645,10 @@ packages: '@humanwhocodes/object-schema@2.0.2': resolution: { integrity: sha512-6EwiSjwWYP7pTckG6I5eyFANjPhmPjUX9JRLUSfNPC7FX7zK9gyZAfUEaECL6ALTpGX5AjnBq3C9XmVWPitNpw== } + deprecated: Use @eslint/object-schema instead - '@ibm-cloud/watsonx-ai@1.1.2': - resolution: { integrity: sha512-0+ClK12jk1Jk28Hwc2BDmKkTXPjFkQOfCKzUk82TsoPwAIEVN+rlM1cny52d3oSMXXbeKorVDmnIEbXPseHiQA== } + '@ibm-cloud/watsonx-ai@1.2.0': + resolution: { integrity: sha512-9eAyLr0sMXmrG6nSjQVqomkl8xd7m1Iy37XPgM2AjB+yjRD5IPjKgwbOXx9xWCa47V0Jt3zrPZWiPUDdaNlU4Q== } engines: { node: '>=18.0.0' } '@icons/material@0.2.4': @@ -3633,6 +3787,10 @@ packages: resolution: { integrity: sha512-QPAkP5EwKdK/bxIr6C1I4Vs0rm2nHiANzj/Z5X2JQkrZo6IqvC4ldZ9K95tF0HdidhA8Bo6egxSzUFPYKcEXLw== } engines: { node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0 } + '@jest/console@29.7.0': + resolution: { integrity: sha512-5Ni4CU7XHQi32IJ398EEP4RrB8eV09sXP2ROqD4bksHrnTree52PsxvX8tpL8LvTZ3pFzXyPbNQReSN41CAhOg== } + engines: { node: ^14.15.0 || ^16.10.0 || >=18.0.0 } + '@jest/core@27.5.1': resolution: { integrity: sha512-AK6/UTrvQD0Cd24NSqmIA6rKsu0tKIxfiCducZvqxYdmMisOYAsdItspT+fQDQYARPf8XgjAFZi0ogW2agH5nQ== } engines: { node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0 } @@ -3642,22 +3800,47 @@ packages: node-notifier: optional: true + '@jest/core@29.7.0': + resolution: { integrity: sha512-n7aeXWKMnGtDA48y8TLWJPJmLmmZ642Ceo78cYWEpiD7FzDgmNDV/GCVRorPABdXLJZ/9wzzgZAlHjXjxDHGsg== } + engines: { node: ^14.15.0 || ^16.10.0 || >=18.0.0 } + peerDependencies: + node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0 + peerDependenciesMeta: + node-notifier: + optional: true + '@jest/environment@27.5.1': resolution: { integrity: sha512-/WQjhPJe3/ghaol/4Bq480JKXV/Rfw8nQdN7f41fM8VDHLcxKXou6QyXAh3EFr9/bVG3x74z1NWDkP87EiY8gA== } engines: { node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0 } + '@jest/environment@29.7.0': + resolution: { integrity: sha512-aQIfHDq33ExsN4jP1NWGXhxgQ/wixs60gDiKO+XVMd8Mn0NWPWgc34ZQDTb2jKaUWQ7MuwoitXAsN2XVXNMpAw== } + engines: { node: ^14.15.0 || ^16.10.0 || >=18.0.0 } + '@jest/expect-utils@29.7.0': resolution: { integrity: sha512-GlsNBWiFQFCVi9QVSx7f5AgMeLxe9YCCs5PuP2O2LdjDAA8Jh9eX7lA1Jq/xdXw3Wb3hyvlFNfZIfcRetSzYcA== } engines: { node: ^14.15.0 || ^16.10.0 || >=18.0.0 } + '@jest/expect@29.7.0': + resolution: { integrity: sha512-8uMeAMycttpva3P1lBHB8VciS9V0XAr3GymPpipdyQXbBcuhkLQOSe8E/p92RyAdToS6ZD1tFkX+CkhoECE0dQ== } + engines: { node: ^14.15.0 || ^16.10.0 || >=18.0.0 } + '@jest/fake-timers@27.5.1': resolution: { integrity: sha512-/aPowoolwa07k7/oM3aASneNeBGCmGQsc3ugN4u6s4C/+s5M64MFo/+djTdiwcbQlRfFElGuDXWzaWj6QgKObQ== } engines: { node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0 } + '@jest/fake-timers@29.7.0': + resolution: { integrity: sha512-q4DH1Ha4TTFPdxLsqDXK1d3+ioSL7yL5oCMJZgDYm6i+6CygW5E5xVr/D1HdsGxjt1ZWSfUAs9OxSB/BNelWrQ== } + engines: { node: ^14.15.0 || ^16.10.0 || >=18.0.0 } + '@jest/globals@27.5.1': resolution: { integrity: sha512-ZEJNB41OBQQgGzgyInAv0UUfDDj3upmHydjieSxFvTRuZElrx7tXg/uVQ5hYVEwiXs3+aMsAeEc9X7xiSKCm4Q== } engines: { node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0 } + '@jest/globals@29.7.0': + resolution: { integrity: sha512-mpiz3dutLbkW2MNFubUGUEVLkTGiqW6yLVTA+JbP6fI6J5iL9Y0Nlg8k95pcF8ctKwCS7WVxteBs29hhfAotzQ== } + engines: { node: ^14.15.0 || ^16.10.0 || >=18.0.0 } + '@jest/reporters@27.5.1': resolution: { integrity: 
sha512-cPXh9hWIlVJMQkVk84aIvXuBB4uQQmFqZiacloFuGiP3ah1sbCxCosidXFDfqG8+6fO1oR2dTJTlsOy4VFmUfw== } engines: { node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0 } @@ -3667,6 +3850,15 @@ packages: node-notifier: optional: true + '@jest/reporters@29.7.0': + resolution: { integrity: sha512-DApq0KJbJOEzAFYjHADNNxAE3KbhxQB1y5Kplb5Waqw6zVbuWatSnMjE5gs8FUgEPmNsnZA3NCWl9NG0ia04Pg== } + engines: { node: ^14.15.0 || ^16.10.0 || >=18.0.0 } + peerDependencies: + node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0 + peerDependenciesMeta: + node-notifier: + optional: true + '@jest/schemas@28.1.3': resolution: { integrity: sha512-/l/VWsdt/aBXgjshLWOFyFt3IVdYypu5y2Wn2rOO1un6nkqIn8SLXzgIMYXFyYsRWDyF5EthmKJMIdJvk08grg== } engines: { node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0 } @@ -3679,6 +3871,10 @@ packages: resolution: { integrity: sha512-y9NIHUYF3PJRlHk98NdC/N1gl88BL08aQQgu4k4ZopQkCw9t9cV8mtl3TV8b/YCB8XaVTFrmUTAJvjsntDireg== } engines: { node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0 } + '@jest/source-map@29.6.3': + resolution: { integrity: sha512-MHjT95QuipcPrpLM+8JMSzFx6eHp5Bm+4XeFDJlwsvVBjmKNiIAvasGK2fxz2WbGRlnvqehFbh07MMa7n3YJnw== } + engines: { node: ^14.15.0 || ^16.10.0 || >=18.0.0 } + '@jest/test-result@27.5.1': resolution: { integrity: sha512-EW35l2RYFUcUQxFJz5Cv5MTOxlJIQs4I7gxzi2zVU7PJhOwfYq1MdC5nhSmYjX1gmMmLPvB3sIaC+BkcHRBfag== } engines: { node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0 } @@ -3687,14 +3883,26 @@ packages: resolution: { integrity: sha512-kZAkxnSE+FqE8YjW8gNuoVkkC9I7S1qmenl8sGcDOLropASP+BkcGKwhXoyqQuGOGeYY0y/ixjrd/iERpEXHNg== } engines: { node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0 } + '@jest/test-result@29.7.0': + resolution: { integrity: sha512-Fdx+tv6x1zlkJPcWXmMDAG2HBnaR9XPSd5aDWQVsfrZmLVT3lU1cwyxLgRmXR9yrq4NBoEm9BMsfgFzTQAbJYA== } + engines: { node: ^14.15.0 || ^16.10.0 || >=18.0.0 } + '@jest/test-sequencer@27.5.1': resolution: { integrity: sha512-LCheJF7WB2+9JuCS7VB/EmGIdQuhtqjRNI9A43idHv3E4KltCTsPsLxvdaubFHSYwY/fNjMWjl6vNRhDiN7vpQ== } engines: { node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0 } + '@jest/test-sequencer@29.7.0': + resolution: { integrity: sha512-GQwJ5WZVrKnOJuiYiAF52UNUJXgTZx1NHjFSEB0qEMmSZKAkdMoIzw/Cj6x6NF4AvV23AUqDpFzQkN/eYCYTxw== } + engines: { node: ^14.15.0 || ^16.10.0 || >=18.0.0 } + '@jest/transform@27.5.1': resolution: { integrity: sha512-ipON6WtYgl/1329g5AIJVbUuEh0wZVbdpGwC99Jw4LwuoBNS95MVphU6zOeD9pDkon+LLbFL7lOQRapbB8SCHw== } engines: { node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0 } + '@jest/transform@29.7.0': + resolution: { integrity: sha512-ok/BTPFzFKVMwO5eOHRrvnBVHdRy9IrsrW1GpMaQ9MCnilNLXQKmAX8s1YXDFaai9xJpac2ySzV0YeRRECr2Vw== } + engines: { node: ^14.15.0 || ^16.10.0 || >=18.0.0 } + '@jest/types@27.5.1': resolution: { integrity: sha512-Cx46iJ9QpwQTjIdq5VJu2QTMMs3QlEjI0x1QbBP5W1+nMzyc2XmimiRR/CbX9TO0cPTeUlxWMOu8mslYsJ8DEw== } engines: { node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0 } @@ -3755,6 +3963,13 @@ packages: peerDependencies: tslib: '2' + '@keyv/redis@4.3.3': + resolution: { integrity: sha512-J/uhvKu/Qfh11yMUs+9KdcGCLmWFd3vMxtDVQh2j9cOcnrpnM5jE1xU+K1/kI89czSVEdeMyqTC9gGNtwi3JEQ== } + engines: { node: '>= 18' } + + '@keyv/serialize@1.0.3': + resolution: { integrity: sha512-qnEovoOp5Np2JDGonIDL6Ayihw0RhnRh6vxPuHo4RDn1UOzwEo4AeIfpL6UGIrsceWrCMiVPgwRjbHu4vYFc3g== } + '@ladle/react-context@1.0.1': resolution: { integrity: sha512-xVQ8siyOEQG6e4Knibes1uA3PTyXnqiMmfSmd5pIbkzeDty8NCBtYHhTXSlfmcDNEsw/G8OzNWo4VbyQAVDl2A== } peerDependencies: @@ -3791,8 +4006,8 @@ packages: resolution: { integrity: 
sha512-ICSrSOT6FzSbR+xnbkP6BxXhuom1ViPRiy8K8KrL6bHbTiR5v1UnpskTWRpyhQS1GA6+3t1gp7XHxB5CZzLyqQ== } engines: { node: '>=18' } - '@langchain/community@0.3.37': - resolution: { integrity: sha512-Ifug3Gc6JHOFNWr0bxT1ie0AUKn3hWkZ4PG+EGqVz8MyeNr68lOhchY4oj51pQCC65KryfZ5CPhXrvbd5Il1GQ== } + '@langchain/community@0.3.40': + resolution: { integrity: sha512-UvpEebdFKJsjFBKeUOvvYHOEFsUcjZnyU1qNirtDajwjzTJlszXtv+Mq8F6w5mJsznpI9x7ZMNzAqydVxMG5hA== } engines: { node: '>=18' } peerDependencies: '@arcjet/redact': ^v1.0.0-alpha.23 @@ -3861,6 +4076,7 @@ packages: '@zilliz/milvus2-sdk-node': '>=2.3.5' apify-client: ^2.7.1 assemblyai: ^4.6.0 + azion: ^1.11.1 better-sqlite3: '>=9.4.0 <12.0.0' cassandra-driver: ^4.7.2 cborg: ^4.1.1 @@ -4050,6 +4266,8 @@ packages: optional: true assemblyai: optional: true + azion: + optional: true better-sqlite3: optional: true cassandra-driver: @@ -4240,12 +4458,6 @@ packages: peerDependencies: '@langchain/core': 0.3.37 - '@langchain/openai@0.4.4': - resolution: { integrity: sha512-UZybJeMd8+UX7Kn47kuFYfqKdBCeBUWNqDtmAr6ZUIMMnlsNIb6MkrEEhGgAEjGCpdT4CU8U/DyyddTz+JayOQ== } - engines: { node: '>=18' } - peerDependencies: - '@langchain/core': 0.3.37 - '@langchain/openai@0.5.6': resolution: { integrity: sha512-zN0iyJthPNmcefIBVybZwcTBgcqu/ElJFov42ZntxEncK4heOMAE9lkq9LQ5CaPU/SgrduibrM1oL57+tLUtaA== } engines: { node: '>=18' } @@ -4282,9 +4494,15 @@ packages: '@lezer/common@1.2.1': resolution: { integrity: sha512-yemX0ZD2xS/73llMZIK6KplkjIjf2EvAHcinDi/TfJ9hS25G0388+ClHt6/3but0oOxinTcQHJLDXh6w1crzFQ== } + '@lezer/css@1.1.8': + resolution: { integrity: sha512-7JhxupKuMBaWQKjQoLtzhGj83DdnZY9MckEOG5+/iLKNK2ZJqKc6hf6uc0HjwCX7Qlok44jBNqZhHKDhEhZYLA== } + '@lezer/highlight@1.2.1': resolution: { integrity: sha512-Z5duk4RN/3zuVO7Jq0pGLJ3qynpxUVsh7IbUbGj88+uV2ApSAn6kWg2au3iJb+0Zi7kKtqffIESgNcRXWZWmSA== } + '@lezer/html@1.3.10': + resolution: { integrity: sha512-dqpT8nISx/p9Do3AchvYGV3qYc4/rKr3IBZxlHmpIKam56P47RSHkSF5f13Vu9hebS1jM0HmtJIwLbWz1VIY6w== } + '@lezer/javascript@1.4.13': resolution: { integrity: sha512-5IBr8LIO3xJdJH1e9aj/ZNLE4LSbdsx25wFmGRAZsj2zSmwAYjx26JyU/BYOCpRQlu1jcv1z3vy4NB9+UkfRow== } @@ -4294,6 +4512,9 @@ packages: '@lezer/lr@1.4.0': resolution: { integrity: sha512-Wst46p51km8gH0ZUmeNrtpRYmdlRHUpN1DQd3GFAyKANi8WVz8c2jHYTf1CVScFaCjQw1iO3ZZdqGDxQPRErTg== } + '@lezer/markdown@1.3.0': + resolution: { integrity: sha512-ErbEQ15eowmJUyT095e9NJc3BI9yZ894fjSDtHftD0InkfUBGgnKSU6dvan9jqsZuNHg2+ag/1oyDRxNsENupQ== } + '@llamaindex/cloud@0.0.5': resolution: { integrity: sha512-8HBSiAZkmX1RvpEM2czEVKqMUCKk7uvMSiDpMGWlEj3MUKBYCh+r8E2TtVhZfU4TunEI7nJRMcVBfXDyFz6Lpw== } peerDependencies: @@ -4321,8 +4542,9 @@ packages: resolution: { integrity: sha512-4XVBpn/xtHkYdGNxPPAqsKZimGgmH/jVXiiGlw9x0iOBLC2bhknA5/X6+znRa9YforTmPtCDiM+euWui50VqdQ== } engines: { node: '>=18' } - '@mendable/firecrawl-js@0.0.28': - resolution: { integrity: sha512-Xa+ZbBQkoR/KHM1ZpvJBdLWSCdRoRGyllDNoVvhKxGv9qXZk9h/lBxbqp3Kc1Kg2L2JJnJCkmeaTUCAn8y33GA== } + '@mendable/firecrawl-js@1.25.1': + resolution: { integrity: sha512-i4MyViBXV9l4Z/gaY1tWXs74ZD4Oiea7xKUOzmYmC/AYKWEMrUuOhZHvdNJCOTHkTWq81xe4pYBQaGp+rtDPuA== } + engines: { node: '>=22.0.0' } '@microsoft/fetch-event-source@2.0.1': resolution: { integrity: sha512-W6CLUJ2eBMw3Rec70qrsEW0jOm/3twwJv21mrmj2yORiaVmVYGS4sSS5yUwvQc1ZlDLYGPnClVWmUUMagKNsfA== } @@ -4347,8 +4569,12 @@ packages: '@modelcontextprotocol/sdk@1.0.1': resolution: { integrity: sha512-slLdFaxQJ9AlRg+hw28iiTtGvShAOgOKXcD0F91nUcRYiOMuS9ZBYjcdNZRXW9G5JQ511GRTdUy1zQVZDpJ+4w== } - '@modelcontextprotocol/sdk@1.10.1': - 
resolution: { integrity: sha512-xNYdFdkJqEfIaTVP1gPKoEvluACHZsHZegIoICX8DM1o6Qf3G5u2BQJHmgd0n4YgRPqqK/u1ujQvrgAxxSJT9w== } + '@modelcontextprotocol/sdk@1.10.2': + resolution: { integrity: sha512-rb6AMp2DR4SN+kc6L1ta2NCpApyA9WYNx3CrTSZvGxq9wH71bRur+zRqPfg0vQ9mjywR7qZdX2RGHOPq3ss+tA== } + engines: { node: '>=18' } + + '@modelcontextprotocol/sdk@1.12.0': + resolution: { integrity: sha512-m//7RlINx1F3sz3KqwY1WWzVgTcYX52HYk4bJ1hkBXV3zccAEth+jRvG8DBRrdaQuRsPAJOx2MH3zaHNCKL7Zg== } engines: { node: '>=18' } '@modelcontextprotocol/server-brave-search@0.6.2': @@ -4641,6 +4867,10 @@ packages: '@nicolo-ribaudo/eslint-scope-5-internals@5.1.1-v1': resolution: { integrity: sha512-54/JRvkLIzzDWshCWfuhadfrfZVPiElY8Fcgmg1HroEly/EDSszzhBAsarCux+D/kOslTRquNzuyGSmUSTTHGg== } + '@noble/hashes@1.8.0': + resolution: { integrity: sha512-jCs9ldd7NwzpgXDIf6P3+NrHh9/sD6CQdxHyjQI+h/6rDNo88ypBxxz45UDuZHz9r3tNz7N/VInSVoVdtXEI4A== } + engines: { node: ^14.21.3 || >=16 } + '@nodelib/fs.scandir@2.1.5': resolution: { integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g== } engines: { node: '>= 8' } @@ -4805,12 +5035,16 @@ packages: resolution: { integrity: sha512-9HhEh5GqFrassUndqJsyW7a0PzfyWr2eV2xwzHLIS+wX3125+9HE9FMRAKmJRwxZhgZGwH3HNQQjoMGZqmOeVA== } engines: { node: '>=14' } + '@opentelemetry/api-logs@0.54.2': + resolution: { integrity: sha512-4MTVwwmLgUh5QrJnZpYo6YRO5IBLAggf2h8gWDblwRagDStY13aEvt7gGk3jewrMaPlHiF83fENhIx0HO97/cQ== } + engines: { node: '>=14' } + '@opentelemetry/api@1.9.0': resolution: { integrity: sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg== } engines: { node: '>=8.0.0' } - '@opentelemetry/auto-instrumentations-node@0.52.0': - resolution: { integrity: sha512-J9SgX7NOpTvQ7itvlOlHP3lTlsMWtVh5WQSHUSTlg2m3A9HlZBri2DtZ8QgNj8rYWe0EQxQ3TQ3H6vabfun4vw== } + '@opentelemetry/auto-instrumentations-node@0.52.1': + resolution: { integrity: sha512-4QaRTZifSoYnh27B3JA7z7YwE0Nwkd824pDeonAQVijeLLsenhZB1japualZ6mF9lY8VdQId9KkNsgmCGdJVNQ== } engines: { node: '>=14' } peerDependencies: '@opentelemetry/api': ^1.4.1 @@ -4821,32 +5055,26 @@ packages: peerDependencies: '@opentelemetry/api': '>=1.0.0 <1.10.0' - '@opentelemetry/core@1.26.0': - resolution: { integrity: sha512-1iKxXXE8415Cdv0yjG3G6hQnB5eVEsJce3QaawX8SjDn0mAS0ZM8fAbZZJD4ajvhC15cePvosSCut404KrIIvQ== } - engines: { node: '>=14' } - peerDependencies: - '@opentelemetry/api': '>=1.0.0 <1.10.0' - '@opentelemetry/core@1.27.0': resolution: { integrity: sha512-yQPKnK5e+76XuiqUH/gKyS8wv/7qITd5ln56QkBTf3uggr0VkXOXfcaAuG330UfdYu83wsyoBwqwxigpIG+Jkg== } engines: { node: '>=14' } peerDependencies: '@opentelemetry/api': '>=1.0.0 <1.10.0' - '@opentelemetry/exporter-logs-otlp-grpc@0.54.0': - resolution: { integrity: sha512-CQC9xl9p8EIvx2KggdM7yffbpmUArKjiqAcjTTTEvqE8kOOf71NSuBU0FXs14FU8vBGTUlsr3oI4vGeWF8FakA== } + '@opentelemetry/exporter-logs-otlp-grpc@0.54.2': + resolution: { integrity: sha512-MQNmV5r96+5n3axLFgNYtVy62x8Ru7VERZH3zgC50KDcIKWCiQT3vHOtzakhzd1Wq0HqOgu6bzKdwzneSoDrEQ== } engines: { node: '>=14' } peerDependencies: '@opentelemetry/api': ^1.3.0 - '@opentelemetry/exporter-logs-otlp-http@0.54.0': - resolution: { integrity: sha512-EX/5YPtFw5hugURWSmOtJEGsjphkwTRAiv2yay40ADCLEzajhI/tM3v/7hFCj+rm37sGFMNawpi3mGLvfKGexQ== } + '@opentelemetry/exporter-logs-otlp-http@0.54.2': + resolution: { integrity: sha512-wYeCSbX2XWX2wFslnfQ/YFUolO0fj2nUiGI7oEQWpLKSg40Lc4xOOW14X/EXOkCCijhP7bigo6nvyEQlxEVLjA== } engines: { node: '>=14' } peerDependencies: '@opentelemetry/api': ^1.3.0 - 
'@opentelemetry/exporter-logs-otlp-proto@0.54.0': - resolution: { integrity: sha512-Q8p1eLP6BGu26VdiR8qBiyufXTZimUl2kv6EwZZPLRU0CJWAFR562UOyUtDxbwQioQFq57DVjCd6mQWBvydAlg== } + '@opentelemetry/exporter-logs-otlp-proto@0.54.2': + resolution: { integrity: sha512-agrzFbSNmIy6dhkyg41ERlEDUDqkaUJj2n/tVRFp9Tl+6wyNVPsqmwU5RWJOXpyK+lYH/znv6A47VpTeJF0lrw== } engines: { node: '>=14' } peerDependencies: '@opentelemetry/api': ^1.3.0 @@ -4875,18 +5103,36 @@ packages: peerDependencies: '@opentelemetry/api': ^1.3.0 + '@opentelemetry/exporter-trace-otlp-grpc@0.54.2': + resolution: { integrity: sha512-tmxiCYhQdPrzwlM6O7VQeNP9PBjKhaiOo54wFxQFZQcoVaDiOOES4+6PwHU1eW+43mDsgdQHN5AHSRHVLe9jDA== } + engines: { node: '>=14' } + peerDependencies: + '@opentelemetry/api': ^1.3.0 + '@opentelemetry/exporter-trace-otlp-http@0.54.0': resolution: { integrity: sha512-00X6rtr6Ew59+MM9pPSH7Ww5ScpWKBLiBA49awbPqQuVL/Bp0qp7O1cTxKHgjWdNkhsELzJxAEYwuRnDGrMXyA== } engines: { node: '>=14' } peerDependencies: '@opentelemetry/api': ^1.3.0 + '@opentelemetry/exporter-trace-otlp-http@0.54.2': + resolution: { integrity: sha512-BgWKKyD/h2zpISdmYHN/sapwTjvt1P4p5yx4xeBV8XAEqh4OQUhOtSGFG80+nPQ1F8of3mKOT1DDoDbJp1u25w== } + engines: { node: '>=14' } + peerDependencies: + '@opentelemetry/api': ^1.3.0 + '@opentelemetry/exporter-trace-otlp-proto@0.54.0': resolution: { integrity: sha512-cpDQj5wl7G8pLu3lW94SnMpn0C85A9Ehe7+JBow2IL5DGPWXTkynFngMtCC3PpQzQgzlyOVe0MVZfoBB3M5ECA== } engines: { node: '>=14' } peerDependencies: '@opentelemetry/api': ^1.3.0 + '@opentelemetry/exporter-trace-otlp-proto@0.54.2': + resolution: { integrity: sha512-XSmm1N2wAhoWDXP1q/N6kpLebWaxl6VIADv4WA5QWKHLRpF3gLz5NAWNJBR8ygsvv8jQcrwnXgwfnJ18H3v1fg== } + engines: { node: '>=14' } + peerDependencies: + '@opentelemetry/api': ^1.3.0 + '@opentelemetry/exporter-zipkin@1.27.0': resolution: { integrity: sha512-eGMY3s4QprspFZojqsuQyQpWNFpo+oNVE/aosTbtvAlrJBAlvXcwwsOROOHOd8Y9lkU4i0FpQW482rcXkgwCSw== } engines: { node: '>=14' } @@ -4899,14 +5145,14 @@ packages: peerDependencies: '@opentelemetry/api': ^1.3.0 - '@opentelemetry/instrumentation-aws-lambda@0.46.0': - resolution: { integrity: sha512-rNmhTC1e1qQD4jw+TZSHlpLYNhrkbKA0P5rlqPpTVHqZXHQctu9+dity2lLBh4DlFKt4p/ibVDLVDoBqjvetKA== } + '@opentelemetry/instrumentation-aws-lambda@0.47.0': + resolution: { integrity: sha512-0BidKDPziHWGl5mnpLuh7ob1X3KpR0UN3QcJkcxIsOMylBbMMp9EoB55dHsTMoNO7bx2uyeY0iirEuTchjF1gQ== } engines: { node: '>=14' } peerDependencies: '@opentelemetry/api': ^1.3.0 - '@opentelemetry/instrumentation-aws-sdk@0.45.0': - resolution: { integrity: sha512-3EGgC0LFZuFfXcOeslhXHhsiInVhhN046YQsYIPflsicAk7v0wN946sZKWuerEfmqx/kFXOsbOeI1SkkTRmqWQ== } + '@opentelemetry/instrumentation-aws-sdk@0.46.0': + resolution: { integrity: sha512-EyxGQVYhgY8OI4/CKzqamUswiEVlua6DJcsmkeNSykZrDGs78jPfssbqoMQGetywHWPZBRVJN4Ba/7aB5iLHBA== } engines: { node: '>=14' } peerDependencies: '@opentelemetry/api': ^1.3.0 @@ -4977,8 +5223,8 @@ packages: peerDependencies: '@opentelemetry/api': ^1.3.0 - '@opentelemetry/instrumentation-grpc@0.54.0': - resolution: { integrity: sha512-IwLwAf1uC6I5lYjUxfvG0jFuppqNuaBIiaDxYFHMWeRX1Rejh4eqtQi2u+VVtSOHsCn2sRnS9hOxQ2w7+zzPLw== } + '@opentelemetry/instrumentation-grpc@0.54.2': + resolution: { integrity: sha512-KhSzerCaaqVH2zfDro7nTunWUZXt1pQISQpE83LuQTOKGk7mN3G60T1wliQ3Qdg0X3UUuhCXEC7u6IAVfDxkUQ== } engines: { node: '>=14' } peerDependencies: '@opentelemetry/api': ^1.3.0 @@ -4989,8 +5235,8 @@ packages: peerDependencies: '@opentelemetry/api': ^1.3.0 - '@opentelemetry/instrumentation-http@0.54.0': - resolution: { integrity: 
sha512-ovl0UrL+vGpi0O7fdZ1mHRdiQkuv6NGMRBRKZZygVCUFNXdoqTpvJRRbTYih5U5FC+PHIFssEordmlblRCaGUg== } + '@opentelemetry/instrumentation-http@0.54.2': + resolution: { integrity: sha512-mABjJ34UcU32pg8g18L9xBh0U3JON/2F6/57BYYy8AZJp2a71lZjcKr0T00pICoic50TW5HvcTrmyfMil+AiXQ== } engines: { node: '>=14' } peerDependencies: '@opentelemetry/api': ^1.3.0 @@ -5043,8 +5289,8 @@ packages: peerDependencies: '@opentelemetry/api': ^1.3.0 - '@opentelemetry/instrumentation-mysql2@0.42.0': - resolution: { integrity: sha512-CQqOjCbHwEnaC+Bd6Sms+82iJkSbPpd7jD7Jwif7q8qXo6yrKLVDYDVK+zKbfnmQtu2xHaHj+xiq4tyjb3sMfg== } + '@opentelemetry/instrumentation-mysql2@0.42.1': + resolution: { integrity: sha512-5hOQbFSpqsgDLaqIeWZNbSWB6XdwN+aBjoCIe60lmGG86zeNXu9I6l1kEckRb+Gy0i7zrt0Tk8S62zsOSZ8l7Q== } engines: { node: '>=14' } peerDependencies: '@opentelemetry/api': ^1.3.0 @@ -5067,8 +5313,8 @@ packages: peerDependencies: '@opentelemetry/api': ^1.3.0 - '@opentelemetry/instrumentation-pg@0.47.0': - resolution: { integrity: sha512-aKu5PCeUv3S8s1wq60JZ2o3DWV2wqvO7WAktjmkx5wXd2+tZRfyDCKFHbP90QuDG1HDzjJ138Ob4d4rJdPETCQ== } + '@opentelemetry/instrumentation-pg@0.47.1': + resolution: { integrity: sha512-qIcydMBVlKtAyFQWYunjqvFMVqIGvxGMXISrdLuSbcCqico9QKhK7bF5wzsotjGwHcGnc7q5kRqSL7j+LnY1Cw== } engines: { node: '>=14' } peerDependencies: '@opentelemetry/api': ^1.3.0 @@ -5115,8 +5361,8 @@ packages: peerDependencies: '@opentelemetry/api': ^1.3.0 - '@opentelemetry/instrumentation-undici@0.7.0': - resolution: { integrity: sha512-1AAqbVt1QOLgnc9DEkHS2R/0FIPI74ud5qgitwP9sVYzRg6e66bPSoAIARCyuANJrWCUrfgI69vLTfRxhBM+3A== } + '@opentelemetry/instrumentation-undici@0.7.1': + resolution: { integrity: sha512-sIl4zrRDP7pR+2Pmdm9XJQULMKiUmvZze2cEW6gUz7TXCEaYmJ+vNMdd7qgeRo8C7AMm+T08mptobFVKPzdz+A== } engines: { node: '>=14' } peerDependencies: '@opentelemetry/api': ^1.7.0 @@ -5133,8 +5379,8 @@ packages: peerDependencies: '@opentelemetry/api': ^1.3.0 - '@opentelemetry/instrumentation@0.54.0': - resolution: { integrity: sha512-B0Ydo9g9ehgNHwtpc97XivEzjz0XBKR6iQ83NTENIxEEf5NHE0otZQuZLgDdey1XNk+bP1cfRpIkSFWM5YlSyg== } + '@opentelemetry/instrumentation@0.54.2': + resolution: { integrity: sha512-go6zpOVoZVztT9r1aPd79Fr3OWiD4N24bCPJsIKkBses8oyFo12F/Ew3UBTdIu6hsW4HC4MVEJygG6TEyJI/lg== } engines: { node: '>=14' } peerDependencies: '@opentelemetry/api': ^1.3.0 @@ -5145,29 +5391,41 @@ packages: peerDependencies: '@opentelemetry/api': ^1.3.0 + '@opentelemetry/otlp-exporter-base@0.54.2': + resolution: { integrity: sha512-NrNyxu6R/bGAwanhz1HI0aJWKR6xUED4TjCH4iWMlAfyRukGbI9Kt/Akd2sYLwRKNhfS+sKetKGCUQPMDyYYMA== } + engines: { node: '>=14' } + peerDependencies: + '@opentelemetry/api': ^1.3.0 + '@opentelemetry/otlp-grpc-exporter-base@0.54.0': resolution: { integrity: sha512-Yl2Dw0jlRWisEia9Hv/N8u2JLITCvzA6gAIKEvxpEu6nwHEftD2WhTJMIclkTtfmSW0rLmEEXymwmboG4xDN0Q== } engines: { node: '>=14' } peerDependencies: '@opentelemetry/api': ^1.3.0 + '@opentelemetry/otlp-grpc-exporter-base@0.54.2': + resolution: { integrity: sha512-HZtACQuLhgDcgNa9arGnVVGV28sSGQ+iwRgICWikFKiVxUsoWffqBvTxPa6G3DUTg5R+up97j/zxubEyxSAOHg== } + engines: { node: '>=14' } + peerDependencies: + '@opentelemetry/api': ^1.3.0 + '@opentelemetry/otlp-transformer@0.54.0': resolution: { integrity: sha512-jRexIASQQzdK4AjfNIBfn94itAq4Q8EXR9d3b/OVbhd3kKQKvMr7GkxYDjbeTbY7hHCOLcLfJ3dpYQYGOe8qOQ== } engines: { node: '>=14' } peerDependencies: '@opentelemetry/api': ^1.3.0 - '@opentelemetry/propagation-utils@0.30.12': - resolution: { integrity: 
sha512-bgab3q/4dYUutUpQCEaSDa+mLoQJG3vJKeSiGuhM4iZaSpkz8ov0fs1MGil5PfxCo6Hhw3bB3bFYhUtnsfT/Pg== } + '@opentelemetry/otlp-transformer@0.54.2': + resolution: { integrity: sha512-2tIjahJlMRRUz0A2SeE+qBkeBXBFkSjR0wqJ08kuOqaL8HNGan5iZf+A8cfrfmZzPUuMKCyY9I+okzFuFs6gKQ== } engines: { node: '>=14' } peerDependencies: - '@opentelemetry/api': ^1.0.0 + '@opentelemetry/api': ^1.3.0 - '@opentelemetry/propagator-aws-xray@1.26.0': - resolution: { integrity: sha512-Sex+JyEZ/xX328TArBqQjh1NZSfNyw5NdASUIi9hnPsnMBMSBaDe7B9JRnXv0swz7niNyAnXa6MY7yOCV76EvA== } + '@opentelemetry/propagation-utils@0.30.12': + resolution: { integrity: sha512-bgab3q/4dYUutUpQCEaSDa+mLoQJG3vJKeSiGuhM4iZaSpkz8ov0fs1MGil5PfxCo6Hhw3bB3bFYhUtnsfT/Pg== } engines: { node: '>=14' } peerDependencies: - '@opentelemetry/api': '>=1.0.0 <1.10.0' + '@opentelemetry/api': ^1.0.0 '@opentelemetry/propagator-b3@1.27.0': resolution: { integrity: sha512-pTsko3gnMioe3FeWcwTQR3omo5C35tYsKKwjgTCTVCgd3EOWL9BZrMfgLBmszrwXABDfUrlAEFN/0W0FfQGynQ== } @@ -5227,14 +5485,20 @@ packages: peerDependencies: '@opentelemetry/api': '>=1.4.0 <1.10.0' + '@opentelemetry/sdk-logs@0.54.2': + resolution: { integrity: sha512-yIbYqDLS/AtBbPjCjh6eSToGNRMqW2VR8RrKEy+G+J7dFG7pKoptTH5T+XlKPleP9NY8JZYIpgJBlI+Osi0rFw== } + engines: { node: '>=14' } + peerDependencies: + '@opentelemetry/api': '>=1.4.0 <1.10.0' + '@opentelemetry/sdk-metrics@1.27.0': resolution: { integrity: sha512-JzWgzlutoXCydhHWIbLg+r76m+m3ncqvkCcsswXAQ4gqKS+LOHKhq+t6fx1zNytvLuaOUBur7EvWxECc4jPQKg== } engines: { node: '>=14' } peerDependencies: '@opentelemetry/api': '>=1.3.0 <1.10.0' - '@opentelemetry/sdk-node@0.54.0': - resolution: { integrity: sha512-F0mdwb4WPFJNypcmkxQnj3sIfh/73zkBgYePXMK8ghsBwYw4+PgM3/85WT6NzNUeOvWtiXacx5CFft2o7rGW3w== } + '@opentelemetry/sdk-node@0.54.2': + resolution: { integrity: sha512-afn8GBpA7Gb55aU0LUxIQ+oe6QxLhsf+Te9iw12Non3ZAspzdoCcfz5+hqecwpuVpEDdnj5iSalF7VVaL2pDeg== } engines: { node: '>=14' } peerDependencies: '@opentelemetry/api': '>=1.3.0 <1.10.0' @@ -5261,6 +5525,9 @@ packages: peerDependencies: '@opentelemetry/api': ^1.1.0 + '@paralleldrive/cuid2@2.2.2': + resolution: { integrity: sha512-ZOBkgDwEdoYVlSeRbYYXs0S9MejQofiVYoTbKzy/6GQa39/q5tQU2IX46+shYnUkpEl3wc+J6wRlar7r2EK2xA== } + '@petamoriken/float16@3.8.7': resolution: { integrity: sha512-/Ri4xDDpe12NT6Ex/DRgHzLlobiQXEW/hmG08w1wj/YU7hLemk97c+zHQFp0iZQ9r7YqgLEXZR2sls4HxBf9NA== } @@ -5405,6 +5672,10 @@ packages: resolution: { integrity: sha512-YGn0GqsRBFUQxklhY7v562VMOP0DcmlrHHs3IV1mFE3cbxe31IITUkqhBcIhVSI/2JqtWAJXg5mjV4aU+zD0HA== } engines: { node: '>=14' } + '@redis/client@1.6.0': + resolution: { integrity: sha512-aR0uffYI700OEEH4gYnitAnv3vzVGXCFvYfdpu/CJKvk4pHfLPEy/JSZyrpQ+15WhXe1yJRXLtfQ84s4mEXnPg== } + engines: { node: '>=14' } + '@redis/graph@1.1.1': resolution: { integrity: sha512-FEMTcTHZozZciLRl6GiiIB4zGm5z5F3F6a6FZCyrfxdKOhFlGkiAqlexWMBzCi4DcRoyiOsuLfW+cjlGWyExOw== } peerDependencies: @@ -5415,16 +5686,42 @@ packages: peerDependencies: '@redis/client': ^1.0.0 + '@redis/json@1.0.7': + resolution: { integrity: sha512-6UyXfjVaTBTJtKNG4/9Z8PSpKE6XgSyEb8iwaqDcy+uKrd/DGYHTWkUdnQDyzm727V7p21WUMhsqz5oy65kPcQ== } + peerDependencies: + '@redis/client': ^1.0.0 + '@redis/search@1.1.6': resolution: { integrity: sha512-mZXCxbTYKBQ3M2lZnEddwEAks0Kc7nauire8q20oA0oA/LoA+E/b5Y5KZn232ztPb1FkIGqo12vh3Lf+Vw5iTw== } peerDependencies: '@redis/client': ^1.0.0 + '@redis/search@1.2.0': + resolution: { integrity: sha512-tYoDBbtqOVigEDMAcTGsRlMycIIjwMCgD8eR2t0NANeQmgK/lvxNAvYyb6bZDD4frHRhIHkJu2TBRvB0ERkOmw== } + peerDependencies: + '@redis/client': 
^1.0.0 + '@redis/time-series@1.0.5': resolution: { integrity: sha512-IFjIgTusQym2B5IZJG3XKr5llka7ey84fw/NOYqESP5WUfQs9zz1ww/9+qoz4ka/S6KcGBodzlCeZ5UImKbscg== } peerDependencies: '@redis/client': ^1.0.0 + '@redis/time-series@1.1.0': + resolution: { integrity: sha512-c1Q99M5ljsIuc4YdaCwfUEXsofakb9c8+Zse2qxTadu8TalLXuAESzLvFAvNVbkmSlvlzIQOLpBCmWI9wTOt+g== } + peerDependencies: + '@redis/client': ^1.0.0 + + '@reduxjs/toolkit@2.2.7': + resolution: { integrity: sha512-faI3cZbSdFb8yv9dhDTmGwclW0vk0z5o1cia+kf7gCbaCwHI5e+7tP57mJUv22pNcNbeA62GSrPpfrUfdXcQ6g== } + peerDependencies: + react: ^16.9.0 || ^17.0.0 || ^18 + react-redux: ^7.2.1 || ^8.1.3 || ^9.0.0 + peerDependenciesMeta: + react: + optional: true + react-redux: + optional: true + '@remirror/core-constants@3.0.0': resolution: { integrity: sha512-42aWfPrimMfDKDi4YegyS7x+/0tlzaqwPQCULLanv3DMIlu96KTJR0fM5isWX2UViOqlGnX6YFgqWepcX+XMNg== } @@ -5598,6 +5895,12 @@ packages: '@sinonjs/commons@1.8.6': resolution: { integrity: sha512-Ky+XkAkqPZSm3NLBeUng77EBQl3cmeJhITaGHdYH8kjVB+aun3S4XBRti2zt17mtt0mIUDiNxYeoJm6drVvBJQ== } + '@sinonjs/commons@3.0.1': + resolution: { integrity: sha512-K3mCHKQ9sVh8o1C9cxkwxaOmXoAMlDxC1mYyHrjqOWEcBjYr76t96zL2zlj5dUGZ3HSw240X1qgH3Mjf1yJWpQ== } + + '@sinonjs/fake-timers@10.3.0': + resolution: { integrity: sha512-V4BG07kuYSUkTCSBHG8G8TNhM+F19jXFWnQtzj+we8DrkpSBCee9Z3Ms8yiGer/dlmhe35/Xdgyo3/0rQKg7YA== } + '@sinonjs/fake-timers@8.1.0': resolution: { integrity: sha512-OAPJUAtgeINhh/TAlUID4QTs53Njm7xzddaVlEs/SXwgtiD1tW22zAB/W1wdqfrpmikgaWQ9Fw6Ws+hsiRm5Vg== } @@ -5605,10 +5908,6 @@ packages: resolution: { integrity: sha512-66HO817oIZ2otLIqy06R5muapqZjkgF1jfU0wyNko8cuqZNu8nbS9ljlhcRYw/M/uWRJzB9ih81DLSHhYbBLlQ== } engines: { node: '>=14.0.0' } - '@smithy/abort-controller@4.0.0': - resolution: { integrity: sha512-xFNL1ZfluscKiVI0qlPEnu7pL1UgNNIzQdjTPkaO7JCJtIkbArPYNtqbxohuNaQdksJ01Tn1wLbDA5oIp62P8w== } - engines: { node: '>=18.0.0' } - '@smithy/abort-controller@4.0.1': resolution: { integrity: sha512-fiUIYgIgRjMWznk6iLJz35K2YxSLHzLBA/RC6lBrKfQ8fHbPfvk7Pk9UvpKoHgJjI18MnbPuEju53zcVy6KF1g== } engines: { node: '>=18.0.0' } @@ -5623,10 +5922,6 @@ packages: resolution: { integrity: sha512-LcBB5JQC3Tx2ZExIJzfvWaajhFIwHrUNQeqxhred2r5nnqrdly9uoCrvM1sxOOdghYuWWm2Kr8tBCDOmxsgeTA== } engines: { node: '>=14.0.0' } - '@smithy/config-resolver@4.0.0': - resolution: { integrity: sha512-29pIDlUY/a9+ChJPAarPiD9cU8fBtBh0wFnmnhj7j5AhgMzc+uyXdfzmziH6xx2jzw54waSP3HfnFkTANZuPYA== } - engines: { node: '>=18.0.0' } - '@smithy/config-resolver@4.0.1': resolution: { integrity: sha512-Igfg8lKu3dRVkTSEm98QpZUvKEOa71jDX4vKRcvJVyRc3UgN3j7vFMf0s7xLQhYmKa8kyJGQgUJDOV5V3neVlQ== } engines: { node: '>=18.0.0' } @@ -5635,8 +5930,8 @@ packages: resolution: { integrity: sha512-zHrrstOO78g+/rOJoHi4j3mGUBtsljRhcKNzloWPv1XIwgcFUi+F1YFKr2qPQ3z7Ls5dNc4L2SPrVarNFIQqog== } engines: { node: '>=14.0.0' } - '@smithy/core@3.0.0': - resolution: { integrity: sha512-pKaas7RWvPljJ8uByCeBa10rtbVJCy4N/Fr7OSPxFezcyG0SQuXWnESZqzXj7m2+A+kPzG6fKyP4wrKidl2Ikg== } + '@smithy/core@3.1.0': + resolution: { integrity: sha512-swFv0wQiK7TGHeuAp6lfF5Kw1dHWsTrCuc+yh4Kh05gEShjsE2RUxHucEerR9ih9JITNtaHcSpUThn5Y/vDw0A== } engines: { node: '>=18.0.0' } '@smithy/core@3.1.5': @@ -5647,10 +5942,6 @@ packages: resolution: { integrity: sha512-+xQe4Pite0kdk9qn0Vyw5BRVh0iSlj+T4TEKRXr4E1wZKtVgIzGlkCrfICSjiPVFkPxk4jMpVboMYdEiiA88/w== } engines: { node: '>=14.0.0' } - '@smithy/credential-provider-imds@4.0.0': - resolution: { integrity: 
sha512-+hTShyZHiq2AVFOxJja3k6O17DKU6TaZbwr2y1OH5HQtUw2a+7O3mMR+10LVmc39ef72SAj+uFX0IW9rJGaLQQ== } - engines: { node: '>=18.0.0' } - '@smithy/credential-provider-imds@4.0.1': resolution: { integrity: sha512-l/qdInaDq1Zpznpmev/+52QomsJNZ3JkTl5yrTl02V6NBgJOQ4LY0SFw/8zsMwj3tLe8vqiIuwF6nxaEwgf6mg== } engines: { node: '>=18.0.0' } @@ -5697,10 +5988,6 @@ packages: '@smithy/fetch-http-handler@2.4.4': resolution: { integrity: sha512-DSUtmsnIx26tPuyyrK49dk2DAhPgEw6xRW7V62nMHIB5dk3NqhGnwcKO2fMdt/l3NUVgia34ZsSJA8bD+3nh7g== } - '@smithy/fetch-http-handler@5.0.0': - resolution: { integrity: sha512-jUEq+4056uqsDLRqQb1fm48rrSMBYcBxVvODfiP37ORcV5n9xWJQsINWcIffyYxWTM5K0Y/GOfhSQGDtWpAPpQ== } - engines: { node: '>=18.0.0' } - '@smithy/fetch-http-handler@5.0.1': resolution: { integrity: sha512-3aS+fP28urrMW2KTjb6z9iFow6jO8n3MFfineGbndvzGZit3taZhKWtTorf+Gp5RpFDDafeHlhfsGlDCXvUnJA== } engines: { node: '>=18.0.0' } @@ -5712,10 +5999,6 @@ packages: resolution: { integrity: sha512-uvCcpDLXaTTL0X/9ezF8T8sS77UglTfZVQaUOBiCvR0QydeSyio3t0Hj3QooVdyFsKTubR8gCk/ubLk3vAyDng== } engines: { node: '>=14.0.0' } - '@smithy/hash-node@4.0.0': - resolution: { integrity: sha512-25OxGYGnG3JPEOTk4iFE03bfmoC6GXUQ4L13z4cNdsS3mkncH22AGSDRfKwwEqutNUxXQZWVy9f72Fm59C9qlg== } - engines: { node: '>=18.0.0' } - '@smithy/hash-node@4.0.1': resolution: { integrity: sha512-TJ6oZS+3r2Xu4emVse1YPB3Dq3d8RkZDKcPr71Nj/lJsdAP1c7oFzYqEn1IBc915TsgLl2xIJNuxCz+gLbLE0w== } engines: { node: '>=18.0.0' } @@ -5727,10 +6010,6 @@ packages: '@smithy/invalid-dependency@2.1.4': resolution: { integrity: sha512-QzlNBl6jt3nb9jNnE51wTegReVvUdozyMMrFEyb/rc6AzPID1O+qMJYjAAoNw098y0CZVfCpEnoK2+mfBOd8XA== } - '@smithy/invalid-dependency@4.0.0': - resolution: { integrity: sha512-0GTyet02HX/sPctEhOExY+3HI7hwkVwOoJg0XnItTJ+Xw7JMuL9FOxALTmKVIV6+wg0kF6veLeg72hVSbD9UCw== } - engines: { node: '>=18.0.0' } - '@smithy/invalid-dependency@4.0.1': resolution: { integrity: sha512-gdudFPf4QRQ5pzj7HEnu6FhKRi61BfH/Gk5Yf6O0KiSbr1LlVhgjThcvjdu658VE6Nve8vaIWB8/fodmS1rBPQ== } engines: { node: '>=18.0.0' } @@ -5750,10 +6029,6 @@ packages: resolution: { integrity: sha512-C6VRwfcr0w9qRFhDGCpWMVhlEIBFlmlPRP1aX9Cv9xDj9SUwlDrNvoV1oP1vjRYuLxCDgovBBynCwwcluS2wLw== } engines: { node: '>=14.0.0' } - '@smithy/middleware-content-length@4.0.0': - resolution: { integrity: sha512-nM1RJqLwkSCidumGK8WwNEZ0a0D/4LkwqdPna+QmHrdPoAK6WGLyZFosdMpsAW1OIbDLWGa+r37Mo4Vth4S4kQ== } - engines: { node: '>=18.0.0' } - '@smithy/middleware-content-length@4.0.1': resolution: { integrity: sha512-OGXo7w5EkB5pPiac7KNzVtfCW2vKBTZNuCctn++TTSOMpe6RZO/n6WEC1AxJINn3+vWLKW49uad3lo/u0WJ9oQ== } engines: { node: '>=18.0.0' } @@ -5762,8 +6037,8 @@ packages: resolution: { integrity: sha512-AsXtUXHPOAS0EGZUSFOsVJvc7p0KL29PGkLxLfycPOcFVLru/oinYB6yvyL73ZZPX2OB8sMYUMrj7eH2kI7V/w== } engines: { node: '>=14.0.0' } - '@smithy/middleware-endpoint@4.0.0': - resolution: { integrity: sha512-/f6z5SqUurmqemhBZNhM0c+C7QW0AY/zJpic//sbdu26q98HSPAI/xvzStjYq+UhtWeAe/jaX6gamdL/2r3W1g== } + '@smithy/middleware-endpoint@4.0.1': + resolution: { integrity: sha512-hCCOPu9+sRI7Wj0rZKKnGylKXBEd9cQJetzjQqe8cT4PWvtQAbvNVa6cgAONiZg9m8LaXtP9/waxm3C3eO4hiw== } engines: { node: '>=18.0.0' } '@smithy/middleware-endpoint@4.0.6': @@ -5774,8 +6049,8 @@ packages: resolution: { integrity: sha512-khpSV0NxqMHfa06kfG4WYv+978sVvfTFmn0hIFKKwOXtIxyYtPKiQWFT4nnwZD07fGdYGbtCBu3YALc8SsA5mA== } engines: { node: '>=14.0.0' } - '@smithy/middleware-retry@4.0.0': - resolution: { integrity: sha512-K6tsFp3Ik44H3694a+LWoXLV8mqy8zn6/vTw2feU72MaIzi51EHMVNNxxpL6e2GI6oxw8FFRGWgGn8+wQRrHZQ== } + 
'@smithy/middleware-retry@4.0.2': + resolution: { integrity: sha512-cJoyDPcpxu84QcFOCgh+ehDm+OjuOLHDQdkVYT898KIXDpEDrjQB3p40EeQNCsT5d36y10yoJe3f/aADoTBXSg== } engines: { node: '>=18.0.0' } '@smithy/middleware-retry@4.0.7': @@ -5786,8 +6061,8 @@ packages: resolution: { integrity: sha512-VAWRWqnNjgccebndpyK94om4ZTYzXLQxUmNCXYzM/3O9MTfQjTNBgtFtQwyIIez6z7LWcCsXmnKVIOE9mLqAHQ== } engines: { node: '>=14.0.0' } - '@smithy/middleware-serde@4.0.0': - resolution: { integrity: sha512-aW4Zo8Cm988RCvhysErzqrQ4YPKgZFhajvgPoZnsWIDaZfT419J17Ahr13Lul3kqGad2dCz7YOrXd7r+UAEj/w== } + '@smithy/middleware-serde@4.0.1': + resolution: { integrity: sha512-Fh0E2SOF+S+P1+CsgKyiBInAt3o2b6Qk7YOp2W0Qx2XnfTdfMuSDKUEcnrtpxCzgKJnqXeLUZYqtThaP0VGqtA== } engines: { node: '>=18.0.0' } '@smithy/middleware-serde@4.0.2': @@ -5798,10 +6073,6 @@ packages: resolution: { integrity: sha512-Qqs2ba8Ax1rGKOSGJS2JN23fhhox2WMdRuzx0NYHtXzhxbJOIMmz9uQY6Hf4PY8FPteBPp1+h0j5Fmr+oW12sg== } engines: { node: '>=14.0.0' } - '@smithy/middleware-stack@4.0.0': - resolution: { integrity: sha512-4NFaX88RmgVrCyJv/3RsSdqMwxzI/EQa8nvhUDVxmLUMRS2JUdHnliD6IwKuqIwIzz+E1aZK3EhSHUM4HXp3ww== } - engines: { node: '>=18.0.0' } - '@smithy/middleware-stack@4.0.1': resolution: { integrity: sha512-dHwDmrtR/ln8UTHpaIavRSzeIk5+YZTBtLnKwDW3G2t6nAupCiQUvNzNoHBpik63fwUaJPtlnMzXbQrNFWssIA== } engines: { node: '>=18.0.0' } @@ -5810,10 +6081,6 @@ packages: resolution: { integrity: sha512-CxPf2CXhjO79IypHJLBATB66Dw6suvr1Yc2ccY39hpR6wdse3pZ3E8RF83SODiNH0Wjmkd0ze4OF8exugEixgA== } engines: { node: '>=14.0.0' } - '@smithy/node-config-provider@4.0.0': - resolution: { integrity: sha512-Crp9rg1ewjqgM2i7pWSpNhfbBa0usyKGDVQLEXTOpu6trFqq3BFLLCgbCE1S18h6mxqKnOqUONq3nWOxUk75XA== } - engines: { node: '>=18.0.0' } - '@smithy/node-config-provider@4.0.1': resolution: { integrity: sha512-8mRTjvCtVET8+rxvmzRNRR0hH2JjV0DFOmwXPrISmTIJEfnCBugpYYGAsCj8t41qd+RB5gbheSQ/6aKZCQvFLQ== } engines: { node: '>=18.0.0' } @@ -5822,8 +6089,8 @@ packages: resolution: { integrity: sha512-yrj3c1g145uiK5io+1UPbJAHo8BSGORkBzrmzvAsOmBKb+1p3jmM8ZwNLDH/HTTxVLm9iM5rMszx+iAh1HUC4Q== } engines: { node: '>=14.0.0' } - '@smithy/node-http-handler@4.0.0': - resolution: { integrity: sha512-WvumtEaFyxaI95zmj6eYlF/vCFCKNyru3P/UUHCUS9BjvajUtNckH2cY3bBfi+qqMPX5gha4g26lcOlE/wPz/Q== } + '@smithy/node-http-handler@4.0.1': + resolution: { integrity: sha512-ddQc7tvXiVLC5c3QKraGWde761KSk+mboCheZoWtuqnXh5l0WKyFy3NfDIM/dsKrI9HlLVH/21pi9wWK2gUFFA== } engines: { node: '>=18.0.0' } '@smithy/node-http-handler@4.0.3': @@ -5834,10 +6101,6 @@ packages: resolution: { integrity: sha512-nWaY/MImj1BiXZ9WY65h45dcxOx8pl06KYoHxwojDxDL+Q9yLU1YnZpgv8zsHhEftlj9KhePENjQTlNowWVyug== } engines: { node: '>=14.0.0' } - '@smithy/property-provider@4.0.0': - resolution: { integrity: sha512-AJSvY1k3SdM0stGrIjL8/FIjXO7X9I7KkznXDmr76RGz+yvaDHLsLm2hSHyzAlmwEQnHaafSU2dwaV0JcnR/4w== } - engines: { node: '>=18.0.0' } - '@smithy/property-provider@4.0.1': resolution: { integrity: sha512-o+VRiwC2cgmk/WFV0jaETGOtX16VNPp2bSQEzu0whbReqE1BMqsP2ami2Vi3cbGVdKu1kq9gQkDAGKbt0WOHAQ== } engines: { node: '>=18.0.0' } @@ -5846,10 +6109,6 @@ packages: resolution: { integrity: sha512-xYBlllOQcOuLoxzhF2u8kRHhIFGQpDeTQj/dBSnw4kfI29WMKL5RnW1m9YjnJAJ49miuIvrkJR+gW5bCQ+Mchw== } engines: { node: '>=14.0.0' } - '@smithy/protocol-http@5.0.0': - resolution: { integrity: sha512-laAcIHWq9GQ5VdAS71DUrCj5HUHZ/89Ee+HRTLhFR5/E3toBlnZfPG+kqBajwfEB5aSdRuKslfzl5Dzrn3pr8A== } - engines: { node: '>=18.0.0' } - '@smithy/protocol-http@5.0.1': resolution: { integrity: 
sha512-TE4cpj49jJNB/oHyh/cRVEgNZaoPaxd4vteJNB0yGidOCVR0jCw/hjPVsT8Q8FRmj8Bd3bFZt8Dh7xGCT+xMBQ== } engines: { node: '>=18.0.0' } @@ -5858,10 +6117,6 @@ packages: resolution: { integrity: sha512-LXSL0J/nRWvGT+jIj+Fip3j0J1ZmHkUyBFRzg/4SmPNCLeDrtVu7ptKOnTboPsFZu5BxmpYok3kJuQzzRdrhbw== } engines: { node: '>=14.0.0' } - '@smithy/querystring-builder@4.0.0': - resolution: { integrity: sha512-kMqPDRf+/hwm+Dmk8AQCaYTJxNWWpNdJJteeMm0jwDbmRDqSqHQ7oLEVzvOnbWJu1poVtOhv6v7jsbyx9JASsw== } - engines: { node: '>=18.0.0' } - '@smithy/querystring-builder@4.0.1': resolution: { integrity: sha512-wU87iWZoCbcqrwszsOewEIuq+SU2mSoBE2CcsLwE0I19m0B2gOJr1MVjxWcDQYOzHbR1xCk7AcOBbGFUYOKvdg== } engines: { node: '>=18.0.0' } @@ -5870,10 +6125,6 @@ packages: resolution: { integrity: sha512-U2b8olKXgZAs0eRo7Op11jTNmmcC/sqYmsA7vN6A+jkGnDvJlEl7AetUegbBzU8q3D6WzC5rhR/joIy8tXPzIg== } engines: { node: '>=14.0.0' } - '@smithy/querystring-parser@4.0.0': - resolution: { integrity: sha512-SbogL1PNEmm28ya0eK2S0EZEbYwe0qpaqSGrODm+uYS6dQ7pekPLVNXjBRuuLIAT26ZF2wTsp6X7AVRBNZd8qw== } - engines: { node: '>=18.0.0' } - '@smithy/querystring-parser@4.0.1': resolution: { integrity: sha512-Ma2XC7VS9aV77+clSFylVUnPZRindhB7BbmYiNOdr+CHt/kZNJoPP0cd3QxCnCFyPXC4eybmyE98phEHkqZ5Jw== } engines: { node: '>=18.0.0' } @@ -5882,10 +6133,6 @@ packages: resolution: { integrity: sha512-JW2Hthy21evnvDmYYk1kItOmbp3X5XI5iqorXgFEunb6hQfSDZ7O1g0Clyxg7k/Pcr9pfLk5xDIR2To/IohlsQ== } engines: { node: '>=14.0.0' } - '@smithy/service-error-classification@4.0.0': - resolution: { integrity: sha512-hIZreT6aXSG0PK/psT1S+kfeGTnYnRRlf7rU3yDmH/crSVjTbS/5h5w2J7eO2ODrQb3xfhJcYxQBREdwsZk6TA== } - engines: { node: '>=18.0.0' } - '@smithy/service-error-classification@4.0.1': resolution: { integrity: sha512-3JNjBfOWpj/mYfjXJHB4Txc/7E4LVq32bwzE7m28GN79+M1f76XHflUaSUkhOriprPDzev9cX/M+dEB80DNDKA== } engines: { node: '>=18.0.0' } @@ -5894,10 +6141,6 @@ packages: resolution: { integrity: sha512-oI99+hOvsM8oAJtxAGmoL/YCcGXtbP0fjPseYGaNmJ4X5xOFTer0KPk7AIH3AL6c5AlYErivEi1X/X78HgTVIw== } engines: { node: '>=14.0.0' } - '@smithy/shared-ini-file-loader@4.0.0': - resolution: { integrity: sha512-Ktupe8msp2GPaKKVfiz3NNUNnslJiGGRoVh3BDpm/RChkQ5INQpqmTc2taE0XChNYumNynLfb3keekIPaiaZeg== } - engines: { node: '>=18.0.0' } - '@smithy/shared-ini-file-loader@4.0.1': resolution: { integrity: sha512-hC8F6qTBbuHRI/uqDgqqi6J0R4GtEZcgrZPhFQnMhfJs3MnUTGSnR1NSJCJs5VWlMydu0kJz15M640fJlRsIOw== } engines: { node: '>=18.0.0' } @@ -5906,10 +6149,6 @@ packages: resolution: { integrity: sha512-gnu9gCn0qQ8IdhNjs6o3QVCXzUs33znSDYwVMWo3nX4dM6j7z9u6FC302ShYyVWfO4MkVMuGCCJ6nl3PcH7V1Q== } engines: { node: '>=14.0.0' } - '@smithy/signature-v4@5.0.0': - resolution: { integrity: sha512-zqcOR1sZTuoA6K3PBNwzu4YgT1pmIwz47tYpgaJjBTfGUIMtcjUaXKtuSKEScdv+0wx45/PbXz0//hk80fky3w== } - engines: { node: '>=18.0.0' } - '@smithy/signature-v4@5.0.1': resolution: { integrity: sha512-nCe6fQ+ppm1bQuw5iKoeJ0MJfz2os7Ic3GBjOkLOPtavbD1ONoyE3ygjBfz2ythFWm4YnRm6OxW+8p/m9uCoIA== } engines: { node: '>=18.0.0' } @@ -5918,8 +6157,8 @@ packages: resolution: { integrity: sha512-SNE17wjycPZIJ2P5sv6wMTteV/vQVPdaqQkoK1KeGoWHXx79t3iLhQXj1uqRdlkMUS9pXJrLOAS+VvUSOYwQKw== } engines: { node: '>=14.0.0' } - '@smithy/smithy-client@4.0.0': - resolution: { integrity: sha512-AgcZ6B+JuqArYioAbaYrCpTCjYsD3/1hPSXntbN2ipsfc4hE+72RFZevUPYgsKxpy3G+QxuLfqm11i3+oX4oSA== } + '@smithy/smithy-client@4.1.1': + resolution: { integrity: sha512-nxsNWCDmWR6LrnC55+fKhbuA1S9v/gNh+5BSiYEQ5X8OYCRZj3G8DBoLoWNc5oXd7LOXvoPEXRnsRph4at8Ttw== } engines: { node: '>=18.0.0' } 
'@smithy/smithy-client@4.1.6': @@ -5930,9 +6169,9 @@ packages: resolution: { integrity: sha512-AR0SXO7FuAskfNhyGfSTThpLRntDI5bOrU0xrpVYU0rZyjl3LBXInZFMTP/NNSd7IS6Ksdtar0QvnrPRIhVrLQ== } engines: { node: '>=14.0.0' } - '@smithy/types@4.0.0': - resolution: { integrity: sha512-aNwIGSOgDOhtTRY/rrn2aeuQeKw/IFrQ998yK5l6Ah853WeWIEmFPs/EO4OpfADEdcK+igWnZytm/oUgkLgUYg== } - engines: { node: '>=18.0.0' } + '@smithy/types@3.3.0': + resolution: { integrity: sha512-IxvBBCTFDHbVoK7zIxqA1ZOdc4QfM5HM7rGleCuHi7L1wnKv5Pn69xXJQ9hgxH60ZVygH9/JG0jRgtUncE3QUA== } + engines: { node: '>=16.0.0' } '@smithy/types@4.1.0': resolution: { integrity: sha512-enhjdwp4D7CXmwLtD6zbcDMbo6/T6WtuuKCY49Xxc6OMOmUWlBEBDREsxxgV2LIdeQPW756+f97GzcgAwp3iLw== } @@ -5941,10 +6180,6 @@ packages: '@smithy/url-parser@2.1.4': resolution: { integrity: sha512-1hTy6UYRYqOZlHKH2/2NzdNQ4NNmW2Lp0sYYvztKy+dEQuLvZL9w88zCzFQqqFer3DMcscYOshImxkJTGdV+rg== } - '@smithy/url-parser@4.0.0': - resolution: { integrity: sha512-2iPpuLoH0hCKpLtqVgilHtpPKsmHihbkwBm3h3RPuEctdmuiOlFRZ2ZI8IHSwl0o4ff5IdyyJ0yu/2tS9KpUug== } - engines: { node: '>=18.0.0' } - '@smithy/url-parser@4.0.1': resolution: { integrity: sha512-gPXcIEUtw7VlK8f/QcruNXm7q+T5hhvGu9tl63LsJPZ27exB6dtNwvh2HIi0v7JcXJ5emBxB+CJxwaLEdJfA+g== } engines: { node: '>=18.0.0' } @@ -5992,8 +6227,8 @@ packages: resolution: { integrity: sha512-lM2JMYCilrejfGf8WWnVfrKly3vf+mc5x9TrTpT++qIKP452uWfLqlaUxbz1TkSfhqm8RjrlY22589B9aI8A9w== } engines: { node: '>= 10.0.0' } - '@smithy/util-defaults-mode-browser@4.0.0': - resolution: { integrity: sha512-7wqsXkzaJkpSqV+Ca95pN9yQutXvhaKeCxGGmjWnRGXY1fW/yR7wr1ouNnUYCJuTS8MvmB61xp5Qdj8YMgIA2Q== } + '@smithy/util-defaults-mode-browser@4.0.2': + resolution: { integrity: sha512-A7mlrRyOMxujL8M5rpCGR0vNdJoN1xP87cXQx+rmMTK0LBDlFg0arRQSqtbckNRNEqfjFx3Dna27tmDNUbAgGQ== } engines: { node: '>=18.0.0' } '@smithy/util-defaults-mode-browser@4.0.7': @@ -6004,8 +6239,8 @@ packages: resolution: { integrity: sha512-UmUbPHbkBJCXRFbq+FPLpVwiFPHj1oPWXJS2f2sy23PtXM94c9X5EceI6JKuKdBty+tzhrAs5JbmPM/HvmDB8Q== } engines: { node: '>= 10.0.0' } - '@smithy/util-defaults-mode-node@4.0.0': - resolution: { integrity: sha512-P8VK885kiRT6TEtvcQvz+L/+xIhrDhCmM664ToUtrshFSBhwGYaJWlQNAH9fXlMhwnNvR+tmh1KngKJIgQP6bw== } + '@smithy/util-defaults-mode-node@4.0.2': + resolution: { integrity: sha512-iyv3X7zfatV/6Oh1HNCqscTrRGUJUEDLOVv6fmGL7vjgUvEQ1xgKBbuIG8UP0dDbcYk0f96kjn9jbc0IdCmLyw== } engines: { node: '>=18.0.0' } '@smithy/util-defaults-mode-node@4.0.7': @@ -6016,10 +6251,6 @@ packages: resolution: { integrity: sha512-tgDpaUNsUtRvNiBulKU1VnpoXU1GINMfZZXunRhUXOTBEAufG1Wp79uDXLau2gg1RZ4dpAR6lXCkrmddihCGUg== } engines: { node: '>= 14.0.0' } - '@smithy/util-endpoints@3.0.0': - resolution: { integrity: sha512-kyOKbkg77lsIVN2jC08uEWm3s16eK1YdVDyi/nKeBDbUnjR30dmTEga79E5tiu5OEgTAdngNswA9V+L6xa65sA== } - engines: { node: '>=18.0.0' } - '@smithy/util-endpoints@3.0.1': resolution: { integrity: sha512-zVdUENQpdtn9jbpD9SCFK4+aSiavRb9BxEtw9ZGUR1TYo6bBHbIoi7VkrFQ0/RwZlzx0wRBaRmPclj8iAoJCLA== } engines: { node: '>=18.0.0' } @@ -6036,10 +6267,6 @@ packages: resolution: { integrity: sha512-5yYNOgCN0DL0OplME0pthoUR/sCfipnROkbTO7m872o0GHCVNJj5xOFJ143rvHNA54+pIPMLum4z2DhPC2pVGA== } engines: { node: '>=14.0.0' } - '@smithy/util-middleware@4.0.0': - resolution: { integrity: sha512-ncuvK6ekpDqtASHg7jx3d3nrkD2BsTzUmeVgvtepuHGxtySY8qUlb4SiNRdxHYcv3pL2SwdXs70RwKBU0edW5w== } - engines: { node: '>=18.0.0' } - '@smithy/util-middleware@4.0.1': resolution: { integrity: 
sha512-HiLAvlcqhbzhuiOa0Lyct5IIlyIz0PQO5dnMlmQ/ubYM46dPInB+3yQGkfxsk6Q24Y0n3/JmcA1v5iEhmOF5mA== } engines: { node: '>=18.0.0' } @@ -6048,10 +6275,6 @@ packages: resolution: { integrity: sha512-JRZwhA3fhkdenSEYIWatC8oLwt4Bdf2LhHbNQApqb7yFoIGMl4twcYI3BcJZ7YIBZrACA9jGveW6tuCd836XzQ== } engines: { node: '>= 14.0.0' } - '@smithy/util-retry@4.0.0': - resolution: { integrity: sha512-64WFoC19NVuHh3HQO2QbGw+n6GzQ6VH/drxwXLOU3GDLKxUUzIR9XNm9aTVqh8/7R+y+DgITiv5LpX5XdOy73A== } - engines: { node: '>=18.0.0' } - '@smithy/util-retry@4.0.1': resolution: { integrity: sha512-WmRHqNVwn3kI3rKk1LsKcVgPBG6iLTBGC1iYOV3GQegwJ3E8yjzHytPt26VNzOWr1qu0xE03nK0Ug8S7T7oufw== } engines: { node: '>=18.0.0' } @@ -6060,8 +6283,8 @@ packages: resolution: { integrity: sha512-CiWaFPXstoR7v/PGHddFckovkhJb28wgQR7LwIt6RsQCJeRIHvUTVWhXw/Pco6Jm6nz/vfzN9FFdj/JN7RTkxQ== } engines: { node: '>=14.0.0' } - '@smithy/util-stream@4.0.0': - resolution: { integrity: sha512-ctcLq8Ogi2FQuGy2RxJXGGrozhFEb4p9FawB5SpTNAkNQWbNHcwrGcVSVI3FtdQtkNAINLiEdMnrx+UN/mafvw== } + '@smithy/util-stream@4.0.1': + resolution: { integrity: sha512-Js16gOgU6Qht6qTPfuJgb+1YD4AEO+5Y1UPGWKSp3BNo8ONl/qhXSYDhFKJtwybRJynlCqvP5IeiaBsUmkSPTQ== } engines: { node: '>=18.0.0' } '@smithy/util-stream@4.1.2': @@ -6091,8 +6314,8 @@ packages: '@sqltools/formatter@1.2.5': resolution: { integrity: sha512-Uy0+khmZqUrUGm5dmMqVlnvufZRSK0FbYzVgp0UMstm+F5+W2/jnEEQyc9vo1ZR/E5ZI/B1WjjoTqBqwJL6Krw== } - '@stripe/agent-toolkit@0.1.20': - resolution: { integrity: sha512-Qg7OVkkIQhsOwjQOQiwG6ldKBDNM42tjc6qyTBPCR+8aMrf33vTfhHjvLv8NjtOCt2eElBdVqH78JBS5DZi1Xg== } + '@stripe/agent-toolkit@0.1.21': + resolution: { integrity: sha512-fuKNgCgqLih+L1um0rKuwLSypn9dgvFnG6VKtJnFVwEVrqKZTcD1KXMNuwru+6FWhpkjbmshGWZRwx0VtEsR1Q== } engines: { node: '>=18' } peerDependencies: '@langchain/core': 0.3.37 @@ -6500,6 +6723,9 @@ packages: '@types/babel__traverse@7.20.5': resolution: { integrity: sha512-WXCyOcRtH37HAUkpXhUduaxdm82b4GSlyTqajXviN4EfiuPgNYR109xMCKvpl6zPIpua0DGlMEDCq+g8EdoheQ== } + '@types/bcryptjs@2.4.6': + resolution: { integrity: sha512-9xlo6R2qDs5uixm0bcIqCeMCE6HiQsIyel9KQySStiyqNl2tnj2mP3DX1Nf56MD6KMenNNlBBsy3LJ7gUEQPXQ== } + '@types/body-parser@1.19.5': resolution: { integrity: sha512-fB3Zu92ucau0iQ0JMCFQE7b/dv8Ot07NI3KaZIkIUNXq82k4eBAqUaneXfleGY9JWskeS9y+u0nXMyspcuQrCg== } @@ -6530,6 +6756,12 @@ packages: '@types/content-disposition@0.5.8': resolution: { integrity: sha512-QVSSvno3dE0MgO76pJhmv4Qyi/j0Yk9pBp0Y7TJ2Tlj+KCgJWY6qX7nnxCOLkZ3VYRSIk1WTxCvwUSdx6CCLdg== } + '@types/cookie-parser@1.4.7': + resolution: { integrity: sha512-Fvuyi354Z+uayxzIGCwYTayFKocfV7TuDYZClCdIP9ckhvAu/ixDtCB6qx2TT0FKjPLf1f3P/J1rgf6lPs64mw== } + + '@types/cookiejar@2.1.5': + resolution: { integrity: sha512-he+DHOWReW0nghN24E1WUqM0efK4kI9oTqDm6XmK8ZPe2djZ90BSNdGnIyCLzCPw7/pogPlGbzI2wHGGmi4O/Q== } + '@types/cors@2.8.17': resolution: { integrity: sha512-8CGDvrBj1zgo2qE+oS3pOCyYNqCPryMWY2bGfwA0dcfopWGgxs+78df0Rs3rc9THP4JkOhLsAa+15VdpAqkcUA== } @@ -6656,6 +6888,9 @@ packages: '@types/express-serve-static-core@5.0.6': resolution: { integrity: sha512-3xhRnjJPkULekpSzgtoNYYcTWgEZkp4myc+Saevii5JPnHNvHMRlBSHDbs7Bh1iPPoVTERHEZXyhyLbMEsExsA== } + '@types/express-session@1.18.0': + resolution: { integrity: sha512-27JdDRgor6PoYlURY+Y5kCakqp5ulC0kmf7y+QwaY+hv9jEFuQOThgkjyA53RP3jmKuBsH5GR6qEfFmvb8mwOA== } + '@types/express@4.17.21': resolution: { integrity: sha512-ejlPM315qwLpaQlQDTjPdsUFSc6ZsP4AN6AlWnogPjQ7CVi7PYF3YVz+CY3jE2pwYf7E/7HlDAN0rV2GxTG0HQ== } @@ -6707,8 +6942,8 @@ packages: '@types/istanbul-reports@3.0.4': resolution: { 
integrity: sha512-pk2B1NWalF9toCRu6gjBzR69syFjP4Od8WRAX+0mmf9lAjCRicLOWc+ZrxZHx/0XRjotgkF9t6iaMJ+aXcOdZQ== } - '@types/jest@29.5.12': - resolution: { integrity: sha512-eDC8bTvT/QhYdxJAulQikueigY5AsdBRH2yDKW3yveW7svY3+DzN84/2NUgkw10RTiJbWqZrTtoGVdYlvFJdLw== } + '@types/jest@29.5.14': + resolution: { integrity: sha512-ZN+4sdnLUbo8EVvVc2ao0GFW6oVrQRPn4K2lglySj7APvSrgzxHiNNK99us4WDMi57xxA2yggblIAMNhXOotLQ== } '@types/js-yaml@4.0.9': resolution: { integrity: sha512-k4MGaQl5TGo/iipqb2UDG2UwjXziSWkh0uysQelTlJpX1qGlpUZYm8PnO4DxG1qBomtJUdYJ6qR6xdIah10JLg== } @@ -6722,6 +6957,9 @@ packages: '@types/json5@0.0.29': resolution: { integrity: sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ== } + '@types/jsonwebtoken@9.0.6': + resolution: { integrity: sha512-/5hndP5dCjloafCXns6SZyESp3Ldq7YjH3zwzwczYnjxIT0Fqzk5ROSYVGfFyczIue7IUEj8hkvLbPoLQ18vQw== } + '@types/katex@0.16.7': resolution: { integrity: sha512-HMwFiRujE5PjrgwHQ25+bsLJgowjGjm5Z8FVSf0N6PwgJrwxH0QxzHYDcKsTfV3wva0vzrpqMTJS2jXPr5BMEQ== } @@ -6761,6 +6999,9 @@ packages: '@types/memcached@2.2.10': resolution: { integrity: sha512-AM9smvZN55Gzs2wRrqeMHVP7KE8KWgCJO/XL5yCly2xF6EKa4YlbpK+cLSAH4NG/Ah64HrlegmGqW8kYws7Vxg== } + '@types/methods@1.1.4': + resolution: { integrity: sha512-ymXWVrDiCxTBE3+RIrrP533E70eA+9qu7zdWoHuOmGujkYtzf4HQF96b8nwHLqhuf4ykX61IGRIB38CC6/sImQ== } + '@types/mime@1.3.5': resolution: { integrity: sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w== } @@ -6803,18 +7044,21 @@ packages: '@types/node@18.19.23': resolution: { integrity: sha512-wtE3d0OUfNKtZYAqZb8HAWGxxXsImJcPUAgZNw+dWFxO6s5tIwIjyKnY76tsTatsNCLJPkVYwUpq15D38ng9Aw== } - '@types/node@20.11.26': - resolution: { integrity: sha512-YwOMmyhNnAWijOBQweOJnQPl068Oqd4K3OFbTc6AHJwzweUwwWG3GIFY74OKks2PJUDkQPeddOQES9mLn1CTEQ== } - '@types/node@20.12.12': resolution: { integrity: sha512-eWLDGF/FOSPtAvEqeRAQ4C8LSA7M1I7i0ky1I8U7kD1J5ITyW3AsRhQrKVoWf5pFKZ2kILsEGJhsI9r93PYnOw== } - '@types/node@22.13.9': - resolution: { integrity: sha512-acBjXdRJ3A6Pb3tqnw9HZmyR3Fiol3aGxRCK1x3d+6CDAMjl7I649wpSd+yNURCjbOUGu9tqtLKnTGxmK6CyGw== } + '@types/node@22.5.4': + resolution: { integrity: sha512-FDuKUJQm/ju9fT/SeX/6+gBzoPzlVCzfzmGkwKvRHQVxi4BntVbyIwf6a4Xn62mrvndLiml6z/UBXIdEVjQLXg== } + + '@types/nodemailer@6.4.15': + resolution: { integrity: sha512-0EBJxawVNjPkng1zm2vopRctuWVCxk34JcIlRuXSf54habUWdz1FB7wHDqOqvDa8Mtpt0Q3LTXQkAs2LNyK5jQ== } '@types/normalize-package-data@2.4.4': resolution: { integrity: sha512-37i+OaWTh9qeK4LSHPsyRC7NahnGotNuZvjLSgcPzblpHB3rrCJxAOgI5gCdKm7coonsaX1Of0ILiTcnZjbfxA== } + '@types/oauth@0.9.6': + resolution: { integrity: sha512-H9TRCVKBNOhZZmyHLqFt9drPM9l+ShWiqqJijU1B8P3DX3ub84NjxDuy+Hjrz+fEca5Kwip3qPMKNyiLgNJtIA== } + '@types/object-hash@3.0.6': resolution: { integrity: sha512-fOBV8C1FIu2ELinoILQ+ApxcUKz4ngq+IWUYrxSGjXzzjUALijilampwkMgEtJ+h2njAW3pi853QpzNVCHB73w== } @@ -6824,6 +7068,30 @@ packages: '@types/parse-json@4.0.2': resolution: { integrity: sha512-dISoDXWWQwUquiKsyZ4Ng+HX2KsPL7LyHKHQwgGFEA3IaKac4Obd+h2a/a6waisAoepJlBcx9paWqjA8/HVjCw== } + '@types/passport-auth0@1.0.9': + resolution: { integrity: sha512-xHYzOkq0qy0U/4QyUnB5JzutGrLARd435Q/5rr0e2kkW3Q49UTJkrohgOibFyw66bfV7aupkDu/in5WfMqJZSg== } + + '@types/passport-github@1.1.12': + resolution: { integrity: sha512-VJpMEIH+cOoXB694QgcxuvWy2wPd1Oq3gqrg2Y9DMVBYs9TmH9L14qnqPDZsNMZKBDH+SvqRsGZj9SgHYeDgcA== } + + '@types/passport-jwt@4.0.1': + resolution: { integrity: 
sha512-Y0Ykz6nWP4jpxgEUYq8NoVZeCQPo1ZndJLfapI249g1jHChvRfZRO/LS3tqu26YgAS/laI1qx98sYGz0IalRXQ== } + + '@types/passport-local@1.0.38': + resolution: { integrity: sha512-nsrW4A963lYE7lNTv9cr5WmiUD1ibYJvWrpE13oxApFsRt77b0RdtZvKbCdNIY4v/QZ6TRQWaDDEwV1kCTmcXg== } + + '@types/passport-oauth2@1.4.17': + resolution: { integrity: sha512-ODiAHvso6JcWJ6ZkHHroVp05EHGhqQN533PtFNBkg8Fy5mERDqsr030AX81M0D69ZcaMvhF92SRckEk2B0HYYg== } + + '@types/passport-openidconnect@0.1.3': + resolution: { integrity: sha512-k1Ni7bG/9OZNo2Qpjg2W6GajL+pww6ZPaNWMXfpteCX4dXf4QgaZLt2hjR5IiPrqwBT9+W8KjCTJ/uhGIoBx/g== } + + '@types/passport-strategy@0.2.38': + resolution: { integrity: sha512-GC6eMqqojOooq993Tmnmp7AUTbbQSgilyvpCYQjT+H6JfG/g6RGc7nXEniZlp0zyKJ0WUdOiZWLBZft9Yug1uA== } + + '@types/passport@1.0.16': + resolution: { integrity: sha512-FD0qD5hbPWQzaM0wHUnJ/T0BBCJBxCeemtnCwc/ThhTg3x9jfrAcRUmj5Dopza+MfFS9acTe3wk7rcVnRIp/0A== } + '@types/pg-pool@2.0.6': resolution: { integrity: sha512-TaAUE5rq2VQYxab5Ts7WZhKNmuN78Q6PiFonTDdpbx8a1H0M1vhy3rhiMjl+e2iHmogyMw7jZF4FrE6eJUy5HQ== } @@ -6931,6 +7199,12 @@ packages: '@types/stylis@4.2.5': resolution: { integrity: sha512-1Xve+NMN7FWjY14vLoY5tL3BVEQ/n42YLwaqJIPYhotZ9uBHt87VceMwWQpzmdEt2TNXIorIFG+YeCUUW7RInw== } + '@types/superagent@8.1.9': + resolution: { integrity: sha512-pTVjI73witn+9ILmoJdajHGW2jkSaOzhiFYF1Rd3EQ94kymLqB9PjD9ISg7WaALC7+dCHT0FGe9T2LktLq/3GQ== } + + '@types/supertest@6.0.3': + resolution: { integrity: sha512-8WzXq62EXFhJ7QsH3Ocb/iKQ/Ty9ZVWnVzoTKc9tyyFRRF3a74Tk2+TLFgaFFw364Ere+npzHKEJ6ga2LzIL7w== } + '@types/swagger-jsdoc@6.0.4': resolution: { integrity: sha512-W+Xw5epcOZrF/AooUM/PccNMSAFOKWZA5dasNyMujTwsBkU74njSJBpvCCJhHAJ95XRMzQrrW844Btu0uoetwQ== } @@ -7263,6 +7537,7 @@ packages: acorn-import-assertions@1.9.0: resolution: { integrity: sha512-cmMwop9x+8KFhxvKrKfPYmN6/pKTYYHBqLa0DfvVZcKMJWNyWLnaqND7dx/qn66R7ewM1UX5XMaDVP5wlVTaVA== } + deprecated: package has been renamed to acorn-import-attributes peerDependencies: acorn: ^8 @@ -7737,6 +8012,12 @@ packages: peerDependencies: '@babel/core': ^7.8.0 + babel-jest@29.7.0: + resolution: { integrity: sha512-BrvGY3xZSwEcCzKvKsCi2GgHqDqsYkOP4/by5xCgIwGXQxIEh+8ew3gmrE1y7XRR6LHZIj6yLYnUi/mm2KXKBg== } + engines: { node: ^14.15.0 || ^16.10.0 || >=18.0.0 } + peerDependencies: + '@babel/core': ^7.8.0 + babel-loader@8.3.0: resolution: { integrity: sha512-H8SvsMF+m9t15HNLMipppzkC+Y2Yq+v3SonZyU70RBL/h1gxPkH08Ot8pEE9Z4Kd+czyWJClmFS8qzIP9OZ04Q== } engines: { node: '>= 8.9' } @@ -7755,6 +8036,10 @@ packages: resolution: { integrity: sha512-50wCwD5EMNW4aRpOwtqzyZHIewTYNxLA4nhB+09d8BIssfNfzBRhkBIHiaPv1Si226TQSvp8gxAJm2iY2qs2hQ== } engines: { node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0 } + babel-plugin-jest-hoist@29.6.3: + resolution: { integrity: sha512-ESAc/RJvGTFEzRwOTT4+lNDk/GNHMkKbNzsvT0qKRfDyyYTskxB5rnU2njIDYVxXCBHHEI1c0YwHob3WaYujOg== } + engines: { node: ^14.15.0 || ^16.10.0 || >=18.0.0 } + babel-plugin-macros@3.1.0: resolution: { integrity: sha512-Cg7TFGpIr01vOQNODXOOaGz2NpCU5gl8x1qJFbb6hbZxR7XrcE2vtbAsTAbJ7/xwJtUuJEw8K8Zr/AE0LHlesg== } engines: { node: '>=10', npm: '>=6' } @@ -7808,6 +8093,12 @@ packages: peerDependencies: '@babel/core': ^7.0.0 + babel-preset-jest@29.6.3: + resolution: { integrity: sha512-0B3bhxR6snWXJZtR/RliHTDPRgn1sNHOR0yVtq/IiQFyuOVjFS+wuio/R4gSNkyYmKmJB4wGZv2NZanmKmTnNA== } + engines: { node: ^14.15.0 || ^16.10.0 || >=18.0.0 } + peerDependencies: + '@babel/core': ^7.0.0 + babel-preset-react-app@10.0.1: resolution: { integrity: 
sha512-b0D9IZ1WhhCWkrTXyFuIIgqGzSkRIH5D5AmB0bXbzYAB1OBAwHcUeyWW2LorutLWF5btNo/N7r/cIdmvvKJlYg== } @@ -7861,6 +8152,10 @@ packages: base64-js@1.5.1: resolution: { integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA== } + base64url@3.0.1: + resolution: { integrity: sha512-ir1UPr3dkwexU7FdV8qBBbNDRUhMmIekYMFZfi+C/sLNnRESKPl23nB9b2pltqfOQNnGzsDdId90AEtG5tCx4A== } + engines: { node: '>=6.0.0' } + base@0.11.2: resolution: { integrity: sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg== } engines: { node: '>=0.10.0' } @@ -7879,6 +8174,9 @@ packages: bcrypt-pbkdf@1.0.2: resolution: { integrity: sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w== } + bcryptjs@2.4.3: + resolution: { integrity: sha512-V/Hy/X9Vt7f3BbPJEi8BdVFMByHi+jNXrYkW3huaybV/kQ0KJg0Y6PkEMbn+zeT+i+SiKZ/HMqJGIIt4LZDqNQ== } + before-after-hook@2.2.3: resolution: { integrity: sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ== } @@ -7976,6 +8274,10 @@ packages: engines: { node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7 } hasBin: true + bs-logger@0.2.6: + resolution: { integrity: sha512-pd8DCoxmbgc7hyPKOvxtqNcjYoOsABPQdcCUjGp3d42VR2CX1ORhk2A87oqqu5R1kk+76nsxZupkmyd+MVtCog== } + engines: { node: '>= 6' } + bser@2.1.1: resolution: { integrity: sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ== } @@ -8039,8 +8341,8 @@ packages: resolution: { integrity: sha512-SrmGzrC024OGtK5Wvv/6VhK4s/iq1h0XUrThc0jla8XhEBUdC79UrG24SOXs68zj7yZnFG0/EG330nPf1Pt5UQ== } deprecated: 2.x is no longer supported, we moved to use @bull-board scope - bullmq@5.43.0: - resolution: { integrity: sha512-gIl9b7yFNV6IE/5I/RsN8yrjkVJR/EC0tE580wF8jpWZukhY3YBJwIoUyMUYIIxeKRscc8DU5eIjuxqeQqfwAA== } + bullmq@5.45.2: + resolution: { integrity: sha512-wHZfcD4z4aLolxREmwNNDSbfh7USeq2e3yu5W2VGkzHMUcrH0fzZuRuCMsjD0XKS9ViK1U854oM9yWR6ftPeDA== } bundle-name@3.0.0: resolution: { integrity: sha512-PKA4BeSvBpQKQ8iPOGCSiell+N8P+Tf1DlwqmYhpe2gAhKPHn8EYOxVT+ShuGmhg8lN8XiSlS80yiExKXrURlw== } @@ -8074,6 +8376,9 @@ packages: resolution: { integrity: sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ== } engines: { node: '>=0.10.0' } + cache-manager@6.4.2: + resolution: { integrity: sha512-oT0d1cGWZAlqEGDPjOfhmldTS767jT6kBT3KIdn7MX5OevlRVYqJT+LxRv5WY4xW9heJtYxeRRXaoKlEW+Biew== } + cacheable-lookup@5.0.4: resolution: { integrity: sha512-2/kNscPhpcxrOigMZzbiWF7dz8ilhb/nIHU3EyZiXWXpeq/au8qJ8VhdftMkty3n7Gj6HIGalQG8oiBNB3AJgA== } engines: { node: '>=10.6.0' } @@ -8226,13 +8531,14 @@ packages: resolution: { integrity: sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ== } engines: { node: '>=10' } - chromadb@1.10.0: - resolution: { integrity: sha512-pgKgL2QcZpX+XJzmDQ9I0m1TpP5CqKaHl/Np8PmNRuXmj7vzaeUCPKdbAc6HA7qQr/HFaZFnJ9y4ejhwDRGqFQ== } + chromadb@1.10.3: + resolution: { integrity: sha512-8gudGHCLfuFIIb3O28hzrHQ5F+qRotw2TaD+xRSIJIdKUQ4yml0EqqynZCNb5KDgEN78hj8USDq96KNDi4YH9A== } engines: { node: '>=14.17.0' } peerDependencies: '@google/generative-ai': ^0.24.0 cohere-ai: ^5.0.0 || ^6.0.0 || ^7.0.0 openai: 4.96.0 + voyageai: ^0.0.3-1 peerDependenciesMeta: '@google/generative-ai': optional: true @@ -8240,6 +8546,8 @@ packages: optional: true openai: optional: true + voyageai: + optional: true chromadb@1.7.3: resolution: { integrity: 
sha512-3GgvQjpqgk5C89x5EuTDaXKbfrdqYDJ5UVyLQ3ZmwxnpetNc+HhRDGjkvXa5KSvpQ3lmKoyDoqnN4tZepfFkbw== } @@ -8574,6 +8882,20 @@ packages: resolution: { integrity: sha512-U73+6lQFmfiNPrYbXqr6kZ1i1wiRqXnp2nhMsINseWXO8lDau0LGEffJ8kQi4EjLZympVgRdvqjAgiZ1tgzDDA== } engines: { node: '>=0.8' } + connect-pg-simple@10.0.0: + resolution: { integrity: sha512-pBGVazlqiMrackzCr0eKhn4LO5trJXsOX0nQoey9wCOayh80MYtThCbq8eoLsjpiWgiok/h+1/uti9/2/Una8A== } + engines: { node: ^18.18.0 || ^20.9.0 || >=22.0.0 } + + connect-redis@8.0.2: + resolution: { integrity: sha512-8A6rNOkwLQf25kkERnutNRppE3WUJ7B2bTl/DKf0jW9PkhzJQ3+VJYDHsN+NIuTtQrWsQr5n5UX7bm9l8rkTtQ== } + engines: { node: '>=18' } + peerDependencies: + express-session: '>=1' + + connect-sqlite3@0.9.15: + resolution: { integrity: sha512-aJGDtASX8DTUZ++7iTN97vR0vGFpm8jDFew/qHK3veISkCpVpPS0tMdqs7i9fiHLaqaU0Jh3c4sUvNxsizaSTA== } + engines: { node: '>=0.4.x' } + console-control-strings@1.1.0: resolution: { integrity: sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ== } @@ -8599,9 +8921,16 @@ packages: convert-source-map@2.0.0: resolution: { integrity: sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg== } + cookie-parser@1.4.6: + resolution: { integrity: sha512-z3IzaNjdwUC2olLIB5/ITd0/setiaFMLYiZJle7xg5Fe9KWAceil7xszYfHHBtDFYLSgJduS2Ty0P1uJdPDJeA== } + engines: { node: '>= 0.8.0' } + cookie-signature@1.0.6: resolution: { integrity: sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ== } + cookie-signature@1.0.7: + resolution: { integrity: sha512-NXdYc3dLr47pBkpUCHtKSwIOQXLVn8dZEuywboCOJY/osA0wFSLlSawr3KN8qXJEyX66FcONTH8EIlVuK0yyFA== } + cookie-signature@1.2.2: resolution: { integrity: sha512-D76uU73ulSXrD1UXF4KE2TMxVVwhsnCgfAyTg9k8P6KGZjlXKrOLe4dJQKI3Bxi5wjesZoFXJWElNWBjPZMbhg== } engines: { node: '>=6.6.0' } @@ -8610,6 +8939,10 @@ packages: resolution: { integrity: sha512-+Hp8fLp57wnUSt0tY0tHEXh4voZRDnoIrZPqlo3DPiI4y9lwg/jqx+1Om94/W6ZaPDOUbnjOt/99w66zk+l1Xg== } engines: { node: '>= 0.6' } + cookie@0.4.1: + resolution: { integrity: sha512-ZwrFkGJxUR3EIoXtO+yVE69Eb7KlixbaeAWfBQB9vVsNn/o+Yw69gBWSSDK825hQNdN+wF8zELf3dFNl/kxkUA== } + engines: { node: '>= 0.6' } + cookie@0.5.0: resolution: { integrity: sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw== } engines: { node: '>= 0.6' } @@ -8618,6 +8951,13 @@ packages: resolution: { integrity: sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w== } engines: { node: '>= 0.6' } + cookie@0.7.2: + resolution: { integrity: sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w== } + engines: { node: '>= 0.6' } + + cookiejar@2.1.4: + resolution: { integrity: sha512-LDx6oHrK+PhzLKJU9j5S7/Y3jM/mUHvD/DeI1WQmJn652iPC5Y4TBzC9l+5OMOXlyTTA+SmVUPm0HQUwpD5Jqw== } + copy-descriptor@0.1.1: resolution: { integrity: sha512-XgZ0pFcakEUlbwQEVNg3+QAis1FyTL3Qel9FYy8pSkQqoG3PNoT0bOCQtOXcOkur21r2Eq2kI+IE+gsmAEVlYw== } engines: { node: '>=0.10.0' } @@ -8671,6 +9011,11 @@ packages: resolution: { integrity: sha512-9IkYqtX3YHPCzoVg1Py+o9057a3i0fp7S530UWokCSaFVTc7CwXPRiOjRjBQQ18ZCNafx78YfnG+HALxtVmOGA== } engines: { node: '>=10.0.0' } + create-jest@29.7.0: + resolution: { integrity: sha512-Adz2bdH0Vq3F53KEMJOoftQFutWCukm6J24wbPWRO4k1kMY7gS7ds/uoJkNuV8wDCtWWnuwGcJwpWcih+zEW1Q== } + engines: { node: ^14.15.0 || ^16.10.0 || >=18.0.0 } + hasBin: true + create-require@1.1.1: resolution: { integrity: 
sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ== } @@ -8860,11 +9205,20 @@ packages: csstype@3.1.3: resolution: { integrity: sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw== } + csv-parser@3.0.0: + resolution: { integrity: sha512-s6OYSXAK3IdKqYO33y09jhypG/bSDHPuyCme/IdEHfWpLf/jKcpitVFyOC6UemgGk8v7Q5u2XE0vvwmanxhGlQ== } + engines: { node: '>= 10' } + hasBin: true + cypress@13.13.0: resolution: { integrity: sha512-ou/MQUDq4tcDJI2FsPaod2FZpex4kpIK43JJlcBgWrX8WX7R/05ZxGTuxedOuZBfxjZxja+fbijZGyxiLP6CFA== } engines: { node: ^16.0.0 || ^18.0.0 || >=20.0.0 } hasBin: true + d3-array@3.2.4: + resolution: { integrity: sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg== } + engines: { node: '>=12' } + d3-color@3.1.0: resolution: { integrity: sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA== } engines: { node: '>=12' } @@ -8885,14 +9239,38 @@ packages: resolution: { integrity: sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w== } engines: { node: '>=12' } + d3-format@3.1.0: + resolution: { integrity: sha512-YyUI6AEuY/Wpt8KWLgZHsIU86atmikuoOmCfommt0LYHiQSPjvX2AcFc38PX0CBpr2RCyZhjex+NS/LPOv6YqA== } + engines: { node: '>=12' } + d3-interpolate@3.0.1: resolution: { integrity: sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g== } engines: { node: '>=12' } + d3-path@3.1.0: + resolution: { integrity: sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ== } + engines: { node: '>=12' } + + d3-scale@4.0.2: + resolution: { integrity: sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ== } + engines: { node: '>=12' } + d3-selection@3.0.0: resolution: { integrity: sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ== } engines: { node: '>=12' } + d3-shape@3.2.0: + resolution: { integrity: sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA== } + engines: { node: '>=12' } + + d3-time-format@4.1.0: + resolution: { integrity: sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg== } + engines: { node: '>=12' } + + d3-time@3.1.0: + resolution: { integrity: sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q== } + engines: { node: '>=12' } + d3-timer@3.0.1: resolution: { integrity: sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA== } engines: { node: '>=12' } @@ -9020,6 +9398,9 @@ packages: resolution: { integrity: sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA== } engines: { node: '>=0.10.0' } + decimal.js-light@2.5.1: + resolution: { integrity: sha512-qIMFpTMZmny+MMIitAB6D7iVPEorVw6YQRWkvarTkT4tBeSLLiHzcwj6q0MmYSFCiVpiqPJTJEYIrpcPzVEIvg== } + decimal.js@10.4.3: resolution: { integrity: sha512-VBBaLc1MgL5XpzgIP7ny5Z6Nx3UrRkIViUkPUdtl9aya5amy3De1gsUUSB1g3+3sExYNjCAsAznmukyxCb1GRA== } @@ -9045,6 +9426,14 @@ packages: dedent@0.7.0: resolution: { integrity: sha512-Q6fKUPqnAHAyhiUgFU7BUzLiv0kd8saH9al7tnu5Q/okj6dnupxyTgFIBjVzJATdfIAm9NAsvXNzjaKa+bxVyA== } + dedent@1.5.3: + resolution: { integrity: sha512-NHQtfOOW68WD8lgypbLA5oT+Bt0xXJhiYvoR6SmmNXZfpzOGXwdKWmcwG8N7PwVVWV3eF/68nmD9BaJSsTBhyQ== } + peerDependencies: + 
babel-plugin-macros: ^3.1.0 + peerDependenciesMeta: + babel-plugin-macros: + optional: true + deep-eql@4.0.0: resolution: { integrity: sha512-GxJC5MOg2KyQlv6WiUF/VAnMj4MWnYiXo4oLgeptOELVoknyErb4Z8+5F/IM/K4g9/80YzzatxmWcyRwUseH0A== } engines: { node: '>=6' } @@ -9164,6 +9553,10 @@ packages: resolution: { integrity: sha512-BDKtmHlOzwI7iRuEkhzsnPoi5ypEhWAJB5RvHWe1kMr06js3uK5B3734i3ui5Yd+wOJV1cpE4JnivPD283GU/A== } engines: { node: '>=0.10.0' } + detect-libc@2.0.2: + resolution: { integrity: sha512-UX6sGumvvqSaXgdKGUsgZWqcUyIXZ/vZTrlRT/iobiKhGL0zL4d3osHj3uqllWJK+i+sixDS/3COVEOFbupFyw== } + engines: { node: '>=8' } + detect-libc@2.0.3: resolution: { integrity: sha512-bwy0MGW55bG41VqxxypOsdSdGqLwXPI/focwgTYCFMbdUiBAxLg9CFzG08sz2aqzknwiX7Hkl0bQENjg8iLByw== } engines: { node: '>=8' } @@ -9367,6 +9760,10 @@ packages: resolution: { integrity: sha512-aITqOwnLanpHLNXZJENbOgjUBeHocD+xsSJmNrjovKBW5HbSpW3d1pEls7GFQPUWXiwG9+0P4GtHfEqC/4M0Iw== } engines: { node: '>=12' } + emittery@0.13.1: + resolution: { integrity: sha512-DeWwawk6r5yR9jFgnDKYt4sLS0LmHJJi3ZOnb5/JdbYwj3nW+FxQnHIjhBKz8YLC7oRNPVM9NQ47I3CVx34eqQ== } + engines: { node: '>=12' } + emittery@0.8.1: resolution: { integrity: sha512-uDfvUjVrfGJJhymx/kz6prltenw1u7WrCg1oa94zYY8xxVpLLUu045LAT0dhDZdXG58/EpPL/5kA180fQ/qudg== } engines: { node: '>=10' } @@ -9680,6 +10077,7 @@ packages: eslint@8.57.0: resolution: { integrity: sha512-dZ6+mexnaTIbSBZWgou51U6OmzIhYM2VcNdtiTtI7qPNZm35Akpr0f6vtw3w1Kmn5PYo+tZVfh13WrhpS6oLqQ== } engines: { node: ^12.22.0 || ^14.17.0 || >=16.0.0 } + deprecated: This version is no longer supported. Please see https://eslint.org/version-support for other options. hasBin: true esm@3.2.25: @@ -9836,6 +10234,9 @@ packages: express-basic-auth@1.2.1: resolution: { integrity: sha512-L6YQ1wQ/mNjVLAmK3AG1RK6VkokA1BIY6wmiH304Xtt/cLTps40EusZsU1Uop+v9lTDPxdtzbFmdXfFO3KEnwA== } + express-mysql-session@3.0.3: + resolution: { integrity: sha512-sEYrzFrOs3er+Ie/uk1dt93qz4AQ9SU1mpJJ0HPs0MJ4t4hE9AcDRNq0sZQUwy2F/SbXusBt1E5+FY6KzSqXNg== } + express-rate-limit@6.11.2: resolution: { integrity: sha512-a7uwwfNTh1U60ssiIkuLFWHt4hAC5yxlLGU2VP0X4YNlyEDZAqF4tK3GD3NSitVBrCQmQ0++0uOyFOgC2y4DDw== } engines: { node: '>= 14' } @@ -9848,6 +10249,10 @@ packages: peerDependencies: express: ^4.11 || 5 || ^5.0.0-beta.1 + express-session@1.18.1: + resolution: { integrity: sha512-a5mtTqEaZvBCL9A9aqkrtfz+3SMDhOVUnjafjo+s7A9Txkq+SVX2DLvSp1Zrv4uCXa3lMSK3viWnh9Gg07PBUA== } + engines: { node: '>= 0.8.0' } + express@4.17.1: resolution: { integrity: sha512-mHJ9O79RqluphRrcw2X/GTh3k9tVv8YcoyY4Kkh4WDMUYKRZUq0h1o0w2rrrxBqM7VoeUVqgb27xlEMXTnYt4g== } engines: { node: '>= 0.10.0' } @@ -9860,6 +10265,10 @@ packages: resolution: { integrity: sha512-YSFlK1Ee0/GC8QaO91tHcDxJiE/X4FbpAyQWkxAvG6AXCuR65YzK8ua6D9hvi/TzUfZMpc+BwuM1IPw8fmQBiQ== } engines: { node: '>= 0.10.0' } + express@4.21.2: + resolution: { integrity: sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA== } + engines: { node: '>= 0.10.0' } + express@5.0.1: resolution: { integrity: sha512-ORF7g6qGnD+YtUG9yx4DFoqCShNMmUKiXuT5oWMHiOvt/4WFbHC6yCwQMTSBMno7AqntNCAzzcnnjowRkTL9eQ== } engines: { node: '>= 18' } @@ -9913,6 +10322,10 @@ packages: fast-diff@1.3.0: resolution: { integrity: sha512-VxPP4NqbUjj6MaAOafWeUn2cXWLcCtljklUtZf0Ind4XQ+QPtmA0b18zZy0jIQx+ExRVCR/ZQpBmik5lXshNsw== } + fast-equals@5.0.1: + resolution: { integrity: sha512-WF1Wi8PwwSY7/6Kx0vKXtw8RwuSGoM1bvDaJbu7MxDlR1vovZjIAKrnzyrThgAjm6JDTu0fVgWXDlMGspodfoQ== } + engines: { node: '>=6.0.0' } + fast-fifo@1.3.2: resolution: { 
integrity: sha512-/d9sfos4yxzpwkDkuN7k2SqFKtYNmCTzgfEpz82x34IM9/zc8KGxQoXg1liNC/izpRM/MBdt44Nmx41ZWqk+FQ== } @@ -9935,6 +10348,9 @@ packages: fast-levenshtein@3.0.0: resolution: { integrity: sha512-hKKNajm46uNmTlhHSyZkmToAc56uZJwYq7yrciZjqOxnlfQwERDQJmHPUp7m1m9wx8vgOe8IaCKZ5Kv2k1DdCQ== } + fast-safe-stringify@2.1.1: + resolution: { integrity: sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA== } + fast-xml-parser@4.2.5: resolution: { integrity: sha512-B9/wizE4WngqQftFPmdaMYlXoJlJOYxGQOanC77fq9k8+Z0v5dDSVh+3glErdIROP//s/jgb7ZuxKfB8nVyo0g== } hasBin: true @@ -9997,6 +10413,9 @@ packages: peerDependencies: webpack: ^4.0.0 || ^5.0.0 + file-stream-rotator@0.6.1: + resolution: { integrity: sha512-u+dBid4PvZw17PmDeRcNOtCP9CCK/9lRN2w+r1xIS7yOL9JFrIBKTvrYsxT4P0pGtThYTn++QS5ChHaUov3+zQ== } + file-type@16.5.4: resolution: { integrity: sha512-/yFHK0aGjFEgDJjEKP0pWCplsPFPhwyfwevf/pVxiN0tmE4L9LmwWxWukdJSHdoCli4VgQLehjJtwQBnqmsKcw== } engines: { node: '>=10' } @@ -10215,6 +10634,10 @@ packages: resolution: { integrity: sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g== } engines: { node: '>=12.20.0' } + formidable@3.5.4: + resolution: { integrity: sha512-YikH+7CUTOtP44ZTnUhR7Ic2UASBPOqmaRkRKxRbywPTe5VxF7RRCck4af9wutiZ/QKM5nME9Bie2fFaPz5Gug== } + engines: { node: '>=14.0.0' } + formik@2.4.5: resolution: { integrity: sha512-Gxlht0TD3vVdzMDHwkiNZqJ7Mvg77xQNfmBRrNtvzcHZs72TJppSTDKHpImCMJZwcWPBJ8jSQQ95GJzXFf1nAQ== } peerDependencies: @@ -10582,6 +11005,11 @@ packages: handle-thing@2.0.1: resolution: { integrity: sha512-9Qn4yBxelxoh2Ow62nP+Ka/kMnOXRi8BXnRaUwezLNhqelnN49xKz4F/dPP8OYLxLxq6JDtZb2i9XznUQbNPTg== } + handlebars@4.7.8: + resolution: { integrity: sha512-vafaFqs8MZkRrSX7sFVUdo3ap/eNiLnb4IakshzvP56X5Nr1iGKAIqdX6tMlm6HcNRIkr6AxO5jFEoJzzpT8aQ== } + engines: { node: '>=0.4.7' } + hasBin: true + harmony-reflect@1.6.2: resolution: { integrity: sha512-HIp/n38R9kQjDEziXyDTuW3vvoxxyxjxFzXLrBr18uB47GnSt+G9D29fqrpM5ZkspMcPICud3XsBJQ4Y2URg8g== } @@ -10926,6 +11354,9 @@ packages: immediate@3.0.6: resolution: { integrity: sha512-XXOFtyqDjNDAQxVfYxuF7g9Il/IbWmmlQg2MYKOH8ExIT1qg6xc4zyS3HaEEATgs1btfzxq15ciUiY7gjSXRGQ== } + immer@10.1.1: + resolution: { integrity: sha512-s2MPrmjovJcoMaHtx6K11Ra7oD05NT97w1IC5zpMkT6Atjr7H8LjaDd81iIxUYpMKSRRNMJE703M1Fhr/TctHw== } + immer@9.0.21: resolution: { integrity: sha512-bc4NBHqOqSfRW7POMkHd51LvClaeMXpm8dx0e8oE2GORbq5aRK7Bxl4FyzVLdGtLmvLKL7BTDBG5ACQm4HWjTA== } @@ -10989,6 +11420,10 @@ packages: resolution: { integrity: sha512-NGnrKwXzSms2qUUih/ILZ5JBqNTSa1+ZmP6flaIp6KmSElgE9qdndzS3cqjrDovwFdmwsGsLdeFgB6suw+1e9g== } engines: { node: '>= 0.4' } + internmap@2.0.3: + resolution: { integrity: sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg== } + engines: { node: '>=12' } + interpret@1.4.0: resolution: { integrity: sha512-agE4QfB2Lkp9uICn7BAqoscw4SZP9kTE2hxiFI3jBPmXJfdqiahTbUuKGsMoN2GtqL9AxhYioAcVvgsb1HvRbA== } engines: { node: '>= 0.10' } @@ -11378,6 +11813,10 @@ packages: resolution: { integrity: sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg== } engines: { node: '>=8' } + istanbul-lib-instrument@6.0.3: + resolution: { integrity: sha512-Vtgk7L/R2JHyyGW07spoFlB8/lpjiOLTjMdms6AFMraYt3BaJauod/NGrfnVG/y4Ix1JEuMRPDPEj2ua+zz1/Q== } + engines: { node: '>=10' } + istanbul-lib-report@3.0.1: resolution: { integrity: sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw== } 
engines: { node: '>=10' } @@ -11409,10 +11848,18 @@ packages: resolution: { integrity: sha512-buBLMiByfWGCoMsLLzGUUSpAmIAGnbR2KJoMN10ziLhOLvP4e0SlypHnAel8iqQXTrcbmfEY9sSqae5sgUsTvw== } engines: { node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0 } + jest-changed-files@29.7.0: + resolution: { integrity: sha512-fEArFiwf1BpQ+4bXSprcDc3/x4HSzL4al2tozwVpDFpsxALjLYdyiIK4e5Vz66GQJIbXJ82+35PtysofptNX2w== } + engines: { node: ^14.15.0 || ^16.10.0 || >=18.0.0 } + jest-circus@27.5.1: resolution: { integrity: sha512-D95R7x5UtlMA5iBYsOHFFbMD/GVA4R/Kdq15f7xYWUfWHBto9NYRsOvnSauTgdF+ogCpJ4tyKOXhUifxS65gdw== } engines: { node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0 } + jest-circus@29.7.0: + resolution: { integrity: sha512-3E1nCMgipcTkCocFwM90XXQab9bS+GMsjdpmPrlelaxwD93Ad8iVEjX/vvHPdLPnFf+L40u+5+iutRdA1N9myw== } + engines: { node: ^14.15.0 || ^16.10.0 || >=18.0.0 } + jest-cli@27.5.1: resolution: { integrity: sha512-Hc6HOOwYq4/74/c62dEE3r5elx8wjYqxY0r0G/nFrLDPMFRu6RA/u8qINOIkvhxG7mMQ5EJsOGfRpI8L6eFUVw== } engines: { node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0 } @@ -11423,6 +11870,16 @@ packages: node-notifier: optional: true + jest-cli@29.7.0: + resolution: { integrity: sha512-OVVobw2IubN/GSYsxETi+gOe7Ka59EFMR/twOU3Jb2GnKKeMGJB5SGUUrEz3SFVmJASUdZUzy83sLNNQ2gZslg== } + engines: { node: ^14.15.0 || ^16.10.0 || >=18.0.0 } + hasBin: true + peerDependencies: + node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0 + peerDependenciesMeta: + node-notifier: + optional: true + jest-config@27.5.1: resolution: { integrity: sha512-5sAsjm6tGdsVbW9ahcChPAFCk4IlkQUknH5AvKjuLTSlcO/wCZKyFdn7Rg0EkC+OGgWODEy2hDpWB1PgzH0JNA== } engines: { node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0 } @@ -11432,6 +11889,18 @@ packages: ts-node: optional: true + jest-config@29.7.0: + resolution: { integrity: sha512-uXbpfeQ7R6TZBqI3/TxCU4q4ttk3u0PJeC+E0zbfSoSjq6bJ7buBPxzQPL0ifrkY4DNu4JUdk0ImlBUYi840eQ== } + engines: { node: ^14.15.0 || ^16.10.0 || >=18.0.0 } + peerDependencies: + '@types/node': '*' + ts-node: '>=9.0.0' + peerDependenciesMeta: + '@types/node': + optional: true + ts-node: + optional: true + jest-diff@27.5.1: resolution: { integrity: sha512-m0NvkX55LDt9T4mctTEgnZk3fmEg3NRYutvMPWM/0iPnkFj2wIeF45O1718cMSOFO1vINkqmxqD8vE37uTEbqw== } engines: { node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0 } @@ -11444,10 +11913,18 @@ packages: resolution: { integrity: sha512-rl7hlABeTsRYxKiUfpHrQrG4e2obOiTQWfMEH3PxPjOtdsfLQO4ReWSZaQ7DETm4xu07rl4q/h4zcKXyU0/OzQ== } engines: { node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0 } + jest-docblock@29.7.0: + resolution: { integrity: sha512-q617Auw3A612guyaFgsbFeYpNP5t2aoUNLwBUbc/0kD1R4t9ixDbyFTHd1nok4epoVFpr7PmeWHrhvuV3XaJ4g== } + engines: { node: ^14.15.0 || ^16.10.0 || >=18.0.0 } + jest-each@27.5.1: resolution: { integrity: sha512-1Ff6p+FbhT/bXQnEouYy00bkNSY7OUpfIcmdl8vZ31A1UUaurOLPA8a8BbJOF2RDUElwJhmeaV7LnagI+5UwNQ== } engines: { node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0 } + jest-each@29.7.0: + resolution: { integrity: sha512-gns+Er14+ZrEoC5fhOfYCY1LOHHr0TI+rQUHZS8Ttw2l7gl+80eHc/gFf2Ktkw0+SIACDTeWvpFcv3B04VembQ== } + engines: { node: ^14.15.0 || ^16.10.0 || >=18.0.0 } + jest-environment-jsdom@27.5.1: resolution: { integrity: sha512-TFBvkTC1Hnnnrka/fUb56atfDtJ9VMZ94JkjTbggl1PEpwrYtUBKMezB3inLmWqQsXYLcMwNoDQwoBTAvFfsfw== } engines: { node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0 } @@ -11456,6 +11933,10 @@ packages: resolution: { integrity: sha512-Jt4ZUnxdOsTGwSRAfKEnE6BcwsSPNOijjwifq5sDFSA2kesnXTvNqKHYgM0hDq3549Uf/KzdXNYn4wMZJPlFLw== } engines: { node: ^10.13.0 || ^12.13.0 || 
^14.15.0 || >=15.0.0 } + jest-environment-node@29.7.0: + resolution: { integrity: sha512-DOSwCRqXirTOyheM+4d5YZOrWcdu0LNZ87ewUoywbcb2XR4wKgqiG8vNeYwhjFMbEkfju7wx2GYH0P2gevGvFw== } + engines: { node: ^14.15.0 || ^16.10.0 || >=18.0.0 } + jest-get-type@27.5.1: resolution: { integrity: sha512-2KY95ksYSaK7DMBWQn6dQz3kqAf3BB64y2udeG+hv4KfSOb9qwcYQstTJc1KCbsix+wLZWZYN8t7nwX3GOBLRw== } engines: { node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0 } @@ -11468,6 +11949,10 @@ packages: resolution: { integrity: sha512-7GgkZ4Fw4NFbMSDSpZwXeBiIbx+t/46nJ2QitkOjvwPYyZmqttu2TDSimMHP1EkPOi4xUZAN1doE5Vd25H4Jng== } engines: { node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0 } + jest-haste-map@29.7.0: + resolution: { integrity: sha512-fP8u2pyfqx0K1rGn1R9pyE0/KTn+G7PxktWidOBTqFPLYX0b9ksaMFkhK5vrS3DVun09pckLdlx90QthlW7AmA== } + engines: { node: ^14.15.0 || ^16.10.0 || >=18.0.0 } + jest-jasmine2@27.5.1: resolution: { integrity: sha512-jtq7VVyG8SqAorDpApwiJJImd0V2wv1xzdheGHRGyuT7gZm6gG47QEskOlzsN1PG/6WNaCo5pmwMHDf3AkG2pQ== } engines: { node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0 } @@ -11476,6 +11961,10 @@ packages: resolution: { integrity: sha512-POXfWAMvfU6WMUXftV4HolnJfnPOGEu10fscNCA76KBpRRhcMN2c8d3iT2pxQS3HLbA+5X4sOUPzYO2NUyIlHQ== } engines: { node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0 } + jest-leak-detector@29.7.0: + resolution: { integrity: sha512-kYA8IJcSYtST2BY9I+SMC32nDpBT3J2NvWJx8+JCuCdl/CR1I4EKUJROiP8XtCcxqgTTBGJNdbB1A8XRKbTetw== } + engines: { node: ^14.15.0 || ^16.10.0 || >=18.0.0 } + jest-matcher-utils@27.5.1: resolution: { integrity: sha512-z2uTx/T6LBaCoNWNFWwChLBKYxTMcGBRjAt+2SbP929/Fflb9aa5LGma654Rz8z9HLxsrUaYzxE9T/EFIL/PAw== } engines: { node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0 } @@ -11500,6 +11989,10 @@ packages: resolution: { integrity: sha512-K4jKbY1d4ENhbrG2zuPWaQBvDly+iZ2yAW+T1fATN78hc0sInwn7wZB8XtlNnvHug5RMwV897Xm4LqmPM4e2Og== } engines: { node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0 } + jest-mock@29.7.0: + resolution: { integrity: sha512-ITOMZn+UkYS4ZFh83xYAOzWStloNzJFO2s8DWrE4lhtGD+AorgnbkiKERe4wQVBydIGPx059g6riW5Btp6Llnw== } + engines: { node: ^14.15.0 || ^16.10.0 || >=18.0.0 } + jest-pnp-resolver@1.2.3: resolution: { integrity: sha512-+3NpwQEnRoIBtx4fyhblQDPgJI0H1IEIkX7ShLUjPGA7TtUTvI1oiKi3SR4oBR0hQhQR80l4WAe5RrXBwWMA8w== } engines: { node: '>=6' } @@ -11517,22 +12010,42 @@ packages: resolution: { integrity: sha512-4s0IgyNIy0y9FK+cjoVYoxamT7Zeo7MhzqRGx7YDYmaQn1wucY9rotiGkBzzcMXTtjrCAP/f7f+E0F7+fxPNdw== } engines: { node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0 } + jest-regex-util@29.6.3: + resolution: { integrity: sha512-KJJBsRCyyLNWCNBOvZyRDnAIfUiRJ8v+hOBQYGn8gDyF3UegwiP4gwRR3/SDa42g1YbVycTidUF3rKjyLFDWbg== } + engines: { node: ^14.15.0 || ^16.10.0 || >=18.0.0 } + jest-resolve-dependencies@27.5.1: resolution: { integrity: sha512-QQOOdY4PE39iawDn5rzbIePNigfe5B9Z91GDD1ae/xNDlu9kaat8QQ5EKnNmVWPV54hUdxCVwwj6YMgR2O7IOg== } engines: { node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0 } + jest-resolve-dependencies@29.7.0: + resolution: { integrity: sha512-un0zD/6qxJ+S0et7WxeI3H5XSe9lTBBR7bOHCHXkKR6luG5mwDDlIzVQ0V5cZCuoTgEdcdwzTghYkTWfubi+nA== } + engines: { node: ^14.15.0 || ^16.10.0 || >=18.0.0 } + jest-resolve@27.5.1: resolution: { integrity: sha512-FFDy8/9E6CV83IMbDpcjOhumAQPDyETnU2KZ1O98DwTnz8AOBsW/Xv3GySr1mOZdItLR+zDZ7I/UdTFbgSOVCw== } engines: { node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0 } + jest-resolve@29.7.0: + resolution: { integrity: sha512-IOVhZSrg+UvVAshDSDtHyFCCBUl/Q3AAJv8iZ6ZjnZ74xzvwuzLXid9IIIPgTnY62SJjfuupMKZsZQRsCvxEgA== } + 
engines: { node: ^14.15.0 || ^16.10.0 || >=18.0.0 } + jest-runner@27.5.1: resolution: { integrity: sha512-g4NPsM4mFCOwFKXO4p/H/kWGdJp9V8kURY2lX8Me2drgXqG7rrZAx5kv+5H7wtt/cdFIjhqYx1HrlqWHaOvDaQ== } engines: { node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0 } + jest-runner@29.7.0: + resolution: { integrity: sha512-fsc4N6cPCAahybGBfTRcq5wFR6fpLznMg47sY5aDpsoejOcVYFb07AHuSnR0liMcPTgBsA3ZJL6kFOjPdoNipQ== } + engines: { node: ^14.15.0 || ^16.10.0 || >=18.0.0 } + jest-runtime@27.5.1: resolution: { integrity: sha512-o7gxw3Gf+H2IGt8fv0RiyE1+r83FJBRruoA+FXrlHw6xEyBsU8ugA6IPfTdVyA0w8HClpbK+DGJxH59UrNMx8A== } engines: { node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0 } + jest-runtime@29.7.0: + resolution: { integrity: sha512-gUnLjgwdGqW7B4LvOIkbKs9WGbn+QLqRQQ9juC6HndeDiezIwhDP+mhMwHWCEcfQ5RUXa6OPnFF8BJh5xegwwQ== } + engines: { node: ^14.15.0 || ^16.10.0 || >=18.0.0 } + jest-serializer@27.5.1: resolution: { integrity: sha512-jZCyo6iIxO1aqUxpuBlwTDMkzOAJS4a3eYz3YzgxxVQFwLeSA7Jfq5cbqCY+JLvTDrWirgusI/0KwxKMgrdf7w== } engines: { node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0 } @@ -11541,6 +12054,10 @@ packages: resolution: { integrity: sha512-yYykXI5a0I31xX67mgeLw1DZ0bJB+gpq5IpSuCAoyDi0+BhgU/RIrL+RTzDmkNTchvDFWKP8lp+w/42Z3us5sA== } engines: { node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0 } + jest-snapshot@29.7.0: + resolution: { integrity: sha512-Rm0BMWtxBcioHr1/OX5YCP8Uov4riHvKPknOGs804Zg9JGZgmIBkbtlxJC/7Z4msKYVbIJtfU+tKb8xlYNfdkw== } + engines: { node: ^14.15.0 || ^16.10.0 || >=18.0.0 } + jest-util@27.5.1: resolution: { integrity: sha512-Kv2o/8jNvX1MQ0KGtw480E/w4fBCDOnH6+6DmeKi6LZUIlKA5kwY0YNdlzaWTiVgxqAqik11QyxDOKk543aKXw== } engines: { node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0 } @@ -11557,6 +12074,10 @@ packages: resolution: { integrity: sha512-thkNli0LYTmOI1tDB3FI1S1RTp/Bqyd9pTarJwL87OIBFuqEb5Apv5EaApEudYg4g86e3CT6kM0RowkhtEnCBQ== } engines: { node: ^10.13.0 || ^12.13.0 || ^14.15.0 || >=15.0.0 } + jest-validate@29.7.0: + resolution: { integrity: sha512-ZB7wHqaRGVw/9hST/OuFUReG7M8vKeq0/J2egIGLdvjHCmYqGARhzXmtgi+gVeZ5uXFF219aOc3Ls2yLg27tkw== } + engines: { node: ^14.15.0 || ^16.10.0 || >=18.0.0 } + jest-watch-typeahead@1.1.0: resolution: { integrity: sha512-Va5nLSJTN7YFtC2jd+7wsoe1pNe5K4ShLux/E5iHEwlB9AxaxmggY7to9KUqKojhaJw3aXqt5WAb4jGPOolpEw== } engines: { node: ^12.22.0 || ^14.17.0 || >=16.0.0 } @@ -11571,6 +12092,10 @@ packages: resolution: { integrity: sha512-t4qcqj9hze+jviFPUN3YAtAEeFnr/azITXQEMARf5cMwKY2SMBRnCQTXLixTl20OR6mLh9KLMrgVJgJISym+1g== } engines: { node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0 } + jest-watcher@29.7.0: + resolution: { integrity: sha512-49Fg7WXkU3Vl2h6LbLtMQ/HyB6rXSIX7SqvBLQmssRBGN9I0PNvPmAmCWSOY6SOvrjhI/F7/bGAv9RtnsPA03g== } + engines: { node: ^14.15.0 || ^16.10.0 || >=18.0.0 } + jest-worker@26.6.2: resolution: { integrity: sha512-KWYVV1c4i+jbMpaBC+U++4Va0cp8OisU185o73T1vo99hqi7w8tSJfUXYswwqqrjzwxa6KpRK54WhPvwf5w6PQ== } engines: { node: '>= 10.13.0' } @@ -11583,6 +12108,10 @@ packages: resolution: { integrity: sha512-CqRA220YV/6jCo8VWvAt1KKx6eek1VIHMPeLEbpcfSfkEeWyBNppynM/o6q+Wmw+sOhos2ml34wZbSX3G13//g== } engines: { node: ^12.13.0 || ^14.15.0 || ^16.10.0 || >=17.0.0 } + jest-worker@29.7.0: + resolution: { integrity: sha512-eIz2msL/EzL9UFTFFx7jBTkeZfku0yUAyZZZmJ93H2TYEiroIx2PQjEXcwYtYl8zXCxb+PAmA2hLIt/6ZEkPHw== } + engines: { node: ^14.15.0 || ^16.10.0 || >=18.0.0 } + jest@27.5.1: resolution: { integrity: sha512-Yn0mADZB89zTtjkPJEXwrac3LHudkQMR+Paqa8uxJHCBr9agxztUifWCyiYrjhMPBoUVBjyny0I7XH6ozDr7QQ== } engines: { node: ^10.13.0 || ^12.13.0 || 
^14.15.0 || >=15.0.0 } @@ -11593,6 +12122,16 @@ packages: node-notifier: optional: true + jest@29.7.0: + resolution: { integrity: sha512-NIy3oAFp9shda19hy4HK0HRTWKtPJmGdnvywu01nOqNC2vZg+Z+fvJDxpMQA88eb2I9EcafcdjYgsDthnYTvGw== } + engines: { node: ^14.15.0 || ^16.10.0 || >=18.0.0 } + hasBin: true + peerDependencies: + node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0 + peerDependenciesMeta: + node-notifier: + optional: true + jiti@1.21.0: resolution: { integrity: sha512-gFqAIbuKyyso/3G2qhiO2OM6shY6EPP/R0+mkDbyspxKazh8BXDC5FiFsUjlczgdNz/vfra0da2y+aHrusLG/Q== } hasBin: true @@ -11737,8 +12276,8 @@ packages: resolution: { integrity: sha512-p/nXbhSEcu3pZRdkW1OfJhpsVtW1gd4Wa1fnQc9YLiTfAjn0312eMKimbdIQzuZl9aa9xUGaRlP9T/CJE/ditQ== } engines: { node: '>=0.10.0' } - jsonrepair@3.11.2: - resolution: { integrity: sha512-ejydGcTq0qKk1r0NUBwjtvswbPFhs19+QEfwSeGwB8KJZ59W7/AOFmQh04c68mkJ+2hGk+OkOmkr2bKG4tGlLQ== } + jsonrepair@3.12.0: + resolution: { integrity: sha512-SWfjz8SuQ0wZjwsxtSJ3Zy8vvLg6aO/kxcp9TWNPGwJKgTZVfhNEQBMk/vPOpYCDFWRxD6QWuI6IHR1t615f0w== } hasBin: true jsonwebtoken@9.0.2: @@ -11780,6 +12319,10 @@ packages: jwt-decode@3.1.2: resolution: { integrity: sha512-UfpWE/VZn0iP50d8cz9NrZLM9lSWhcJ+0Gt/nm4by88UL+J1SiKN8/5dkjMmbEzwL2CAe+67GsegCbIKtbp75A== } + jwt-decode@4.0.0: + resolution: { integrity: sha512-+KJGIyHgkGuIq3IEBNftfhW/LfWhXUIY6OmyVWjliu5KH1y0fw7VQ8YndE2O4qZdMSd9SqbnC8GOcZEy0Om7sA== } + engines: { node: '>=18' } + katex@0.16.9: resolution: { integrity: sha512-fsSYjWS0EEOwvy81j3vRA8TEAhQhKiqO+FQaKWp0m39qwOzHVBgAUBIXWj1pB+O2W3fIpNa6Y9KSKCVbfPhyAQ== } hasBin: true @@ -11787,6 +12330,9 @@ packages: keyv@4.5.4: resolution: { integrity: sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw== } + keyv@5.3.2: + resolution: { integrity: sha512-Lji2XRxqqa5Wg+CHLVfFKBImfJZ4pCSccu9eVWK6w4c2SDFLd8JAn1zqTuSFnsxb7ope6rMsnIHfp+eBbRBRZQ== } + kill-port@2.0.1: resolution: { integrity: sha512-e0SVOV5jFo0mx8r7bS29maVWp17qGqLBZ5ricNSajON6//kmb7qqqNnml4twNE8Dtj97UQD+gNFOaipS/q1zzQ== } hasBin: true @@ -11822,8 +12368,8 @@ packages: kuler@2.0.0: resolution: { integrity: sha512-Xq9nH7KlWZmXAtodXDDRE7vs6DU1gTU8zYDHDiWLSip45Egwq3plLHzPn27NgvzL2r1LMPC1vdqh98sQxtqj4A== } - langchain@0.3.5: - resolution: { integrity: sha512-Gq0xC45Sq6nszS8kQG9suCrmBsuXH0INMmiF7D2TwPb6mtG35Jiq4grCk9ykpwPsarTHdty3SzUbII/FqiYSSw== } + langchain@0.3.6: + resolution: { integrity: sha512-erZOIKXzwCOrQHqY9AyjkQmaX62zUap1Sigw1KrwMUOnVoLKkVNRmAyxFlNZDZ9jLs/58MaQcaT9ReJtbj3x6w== } engines: { node: '>=18' } peerDependencies: '@langchain/anthropic': '*' @@ -11889,16 +12435,8 @@ packages: resolution: { integrity: sha512-pLwepjtA7ki4aK20L1KqbJi55f10KVHHOSPAqzoNnAZqWv/YlHyxHhNrY/Nkxb+rM+hKLZNBMpmjlgvceEQtvw== } hasBin: true - langsmith@0.2.15: - resolution: { integrity: sha512-homtJU41iitqIZVuuLW7iarCzD4f39KcfP9RTBWav9jifhrsDa1Ez89Ejr+4qi72iuBu8Y5xykchsGVgiEZ93w== } - peerDependencies: - openai: 4.96.0 - peerDependenciesMeta: - openai: - optional: true - - langsmith@0.2.5: - resolution: { integrity: sha512-dA+l7ZEh1Q9Q9FcE39PUSSEMfsFo73R2V81fRo5KSlGNcypOEhoQvv6lbjyZP7MHmt3/9pPcfpuRd5Y4RbFYqQ== } + langsmith@0.2.8: + resolution: { integrity: sha512-wKVNZoYtd8EqQWUEsfDZlZ77rH7vVqgNtONXRwynUp7ZFMFUIPhSlqE9pXqrmYPE8ZTBFj7diag2lFgUuaOEKw== } peerDependencies: openai: 4.96.0 peerDependenciesMeta: @@ -12187,6 +12725,10 @@ packages: resolution: { integrity: sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA== } engines: { node: '>=12' } + lru-cache@8.0.5: + resolution: { 
integrity: sha512-MhWWlVnuab1RG5/zMRRcVGXZLCXrZTgfwMikgzCegsPnG62yDQo5JnqKkrK4jO5iKqDAZGItAqN5CtKBCBWRUA== } + engines: { node: '>=16.14' } + lru-cache@9.1.2: resolution: { integrity: sha512-ERJq3FOzJTxBbFjZ7iDs+NiK4VI9Wz+RdrrAB8dio1oV+YvdPzUEE4QNiT2VD51DkIbCYRUUzCRkssXCHqSnKQ== } engines: { node: 14 || >=16.14 } @@ -12392,11 +12934,13 @@ packages: resolution: { integrity: sha512-GftCCBs6EN8sz3BoWO1bCj8t7YBtT713d8bUgbhg9Iel5kFSqnSvCK06TYIDJAtJ51cSiWkM/YemlT0dfoFycw== } engines: { node: '>=12' } - mem0ai@2.1.12: - resolution: { integrity: sha512-lFHgiEZkRh+WjsuVjhFCC4LAzTbCrAACoaIJccwX7qkTGl17N+8zLmWY+ozKKnXMVgRbyfjLm8XUdAPjs+RJSw== } + mem0ai@2.1.16: + resolution: { integrity: sha512-rgNKFSBnEroeqOT3fYM26hs53W5IA72Ca/JA+5YoX7BeE5yL/KeDVrO3LbseFUGABpp4rFrvzjfE92u+sAdpvw== } engines: { node: '>=18' } peerDependencies: '@anthropic-ai/sdk': 0.18.0 + '@google/genai': ^0.7.0 + '@mistralai/mistralai': ^1.5.2 '@qdrant/js-client-rest': 1.13.0 '@supabase/supabase-js': ^2.49.1 '@types/jest': 29.5.14 @@ -12406,7 +12950,7 @@ packages: neo4j-driver: ^5.28.1 ollama: ^0.5.14 pg: 8.11.3 - redis: 4.7.0 + redis: ^4.6.13 sqlite3: 5.1.7 memfs@3.5.3: @@ -12585,6 +13129,11 @@ packages: engines: { node: '>=4' } hasBin: true + mime@2.6.0: + resolution: { integrity: sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg== } + engines: { node: '>=4.0.0' } + hasBin: true + mime@3.0.0: resolution: { integrity: sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A== } engines: { node: '>=10.0.0' } @@ -12848,6 +13397,10 @@ packages: mute-stream@0.0.8: resolution: { integrity: sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA== } + mysql2@3.10.2: + resolution: { integrity: sha512-KCXPEvAkO0RcHPr362O5N8tFY2fXvbjfkPvRY/wGumh4EOemo9Hm5FjQZqv/pCmrnuxGu5OxnSENG0gTXqKMgQ== } + engines: { node: '>= 8.0' } + mysql2@3.11.4: resolution: { integrity: sha512-Z2o3tY4Z8EvSRDwknaC40MdZ3+m0sKbpnXrShQLdxPrAvcNli7jLrD2Zd2IzsRMw4eK9Yle500FDmlkIqp+krg== } engines: { node: '>= 8.0' } @@ -13009,6 +13562,10 @@ packages: node-releases@2.0.14: resolution: { integrity: sha512-y10wOWt8yZpqXmOgRo77WaHEmhYQYGNA6y421PKsKYWEK8aW+cqAphborZDhqfyKrbZEN92CN1X2KbafY2s7Yw== } + nodemailer@6.9.15: + resolution: { integrity: sha512-AHf04ySLC6CIfuRtRiEYtGEXgRfa6INgWGluDhnxTZhHSKvrBu7lc1VVchQ0d8nPc4cFaZoPq8vkyNoZr0TpGQ== } + engines: { node: '>=6.0.0' } + nodemon@2.0.22: resolution: { integrity: sha512-B8YqaKMmyuCO7BowF1Z1/mkPqLk6cs/l63Ojtd6otKjMx47Dq1utxfRxcavH1I7VSaL8n5BUaoutadnsX3AAVQ== } engines: { node: '>=8.10.0' } @@ -13169,6 +13726,12 @@ packages: nwsapi@2.2.7: resolution: { integrity: sha512-ub5E4+FBPKwAZx0UwIQOjYWGHTEq5sPqHQNRN8Z9e4A7u3Tj1weLJsL59yH9vmvqEtBHaOmT6cYQKIZOxp35FQ== } + oauth@0.10.0: + resolution: { integrity: sha512-1orQ9MT1vHFGQxhuy7E/0gECD3fd2fCC+PIX+/jgmU/gI3EpRocXtmtvxCO5x3WZ443FLTLFWNDjl5MPJf9u+Q== } + + oauth@0.9.15: + resolution: { integrity: sha512-a5ERWK1kh38ExDEfoO6qUHJb32rd7aYmPHuyCu3Fta/cnICvYmgd2uhuKXvPD+PXB+gCEYYEaQdIRAjCOwAKNA== } + object-assign@4.1.1: resolution: { integrity: sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg== } engines: { node: '>=0.10.0' } @@ -13510,6 +14073,52 @@ packages: resolution: { integrity: sha512-XHXfu/yOQRy9vYOtUDVMN60OEJjW013GoObG1o+xwQTpB9eYJX/BjXMsdW13ZDPruFhYYn0AG22w0xgQMwl3Nw== } engines: { node: '>=0.10.0' } + passport-auth0@1.4.4: + resolution: { integrity: 
sha512-PFkjMfsfXSwgn94QCrZl2hObRHiqrAJffyeUvI8e8HqTG7MfOlyzWO3wSL5dlH+MUGR5+DQr+vtXFFu6Sx8cfg== } + + passport-cookie@1.0.9: + resolution: { integrity: sha512-8a6foX2bbGoJzup0RAiNcC2tTqzYS46RQEK3Z4u8p86wesPUjgDaji3C7+5j4TGyCq4ZoOV+3YLw1Hy6cV6kyw== } + engines: { node: '>= 0.10.0' } + + passport-github@1.1.0: + resolution: { integrity: sha512-XARXJycE6fFh/dxF+Uut8OjlwbFEXgbPVj/+V+K7cvriRK7VcAOm+NgBmbiLM9Qv3SSxEAV+V6fIk89nYHXa8A== } + engines: { node: '>= 0.4.0' } + + passport-google-oauth20@2.0.0: + resolution: { integrity: sha512-KSk6IJ15RoxuGq7D1UKK/8qKhNfzbLeLrG3gkLZ7p4A6DBCcv7xpyQwuXtWdpyR0+E0mwkpjY1VfPOhxQrKzdQ== } + engines: { node: '>= 0.4.0' } + + passport-jwt@4.0.1: + resolution: { integrity: sha512-UCKMDYhNuGOBE9/9Ycuoyh7vP6jpeTp/+sfMJl7nLff/t6dps+iaeE0hhNkKN8/HZHcJ7lCdOyDxHdDoxoSvdQ== } + + passport-local@1.0.0: + resolution: { integrity: sha512-9wCE6qKznvf9mQYYbgJ3sVOHmCWoUNMVFoZzNoznmISbhnNNPhN9xfY3sLmScHMetEJeoY7CXwfhCe7argfQow== } + engines: { node: '>= 0.4.0' } + + passport-oauth1@1.3.0: + resolution: { integrity: sha512-8T/nX4gwKTw0PjxP1xfD0QhrydQNakzeOpZ6M5Uqdgz9/a/Ag62RmJxnZQ4LkbdXGrRehQHIAHNAu11rCP46Sw== } + engines: { node: '>= 0.4.0' } + + passport-oauth2@1.8.0: + resolution: { integrity: sha512-cjsQbOrXIDE4P8nNb3FQRCCmJJ/utnFKEz2NX209f7KOHPoX18gF7gBzBbLLsj2/je4KrgiwLLGjf0lm9rtTBA== } + engines: { node: '>= 0.4.0' } + + passport-oauth@1.0.0: + resolution: { integrity: sha512-4IZNVsZbN1dkBzmEbBqUxDG8oFOIK81jqdksE3HEb/vI3ib3FMjbiZZ6MTtooyYZzmKu0BfovjvT1pdGgIq+4Q== } + engines: { node: '>= 0.4.0' } + + passport-openidconnect@0.1.2: + resolution: { integrity: sha512-JX3rTyW+KFZ/E9OF/IpXJPbyLO9vGzcmXB5FgSP2jfL3LGKJPdV7zUE8rWeKeeI/iueQggOeFa3onrCmhxXZTg== } + engines: { node: '>= 0.6.0' } + + passport-strategy@1.0.0: + resolution: { integrity: sha512-CB97UUvDKJde2V0KDWWB3lyf6PC3FaZP7YxZ2G8OAtn9p4HI9j9JLP9qjOGZFvyl8uwNT8qM+hGnz/n16NI7oA== } + engines: { node: '>= 0.4.0' } + + passport@0.7.0: + resolution: { integrity: sha512-cPLl+qZpSc+ireUvt+IzqbED1cHHkDoVYMo30jbJIdOOjQ1MQYZBPiNvmi8UM6lJuOpTPXJGZQk0DtC4y61MYQ== } + engines: { node: '>= 0.4.0' } + password-prompt@1.1.3: resolution: { integrity: sha512-HkrjG2aJlvF0t2BMH0e2LB/EHf3Lcq3fNMzy4GYHcQblAvOl+QQji1Lx7WRBMqpVK8p+KR7bCg7oqAMXtdgqyw== } @@ -13580,6 +14189,9 @@ packages: pause-stream@0.0.11: resolution: { integrity: sha512-e3FBlXLmN/D1S+zHzanP4E/4Z60oFAa3O051qt1pxa7DEJWKAyil6upYVXCWadEnuoqa4Pkc9oUx9zsxYeRv8A== } + pause@0.0.1: + resolution: { integrity: sha512-KG8UEiEVkR3wGEb4m5yZkVCzigAD+cVEJck2CzYZO37ZGJfctvVptVO192MwrtPhzONn6go8ylnOdMhKqi4nfg== } + pdf-parse@1.1.1: resolution: { integrity: sha512-v6ZJ/efsBpGrGGknjtq9J/oC8tZWq0KWL5vQrk2GlzLEQPUDB1ex+13Rmidl1neNN358Jn9EHZw5y07FFtaC7A== } engines: { node: '>=6.8.1' } @@ -13620,9 +14232,6 @@ packages: pg-connection-string@2.6.2: resolution: { integrity: sha512-ch6OwaeaPYcova4kKZ15sbJ2hKb/VP48ZD2gE7i1J+L4MspCtBMAx8nMgz7bksc7IojCIIWuEhHibSMFH8m8oA== } - pg-connection-string@2.6.4: - resolution: { integrity: sha512-v+Z7W/0EO707aNMaAEfiGnGL9sxxumwLl2fJvCQtMn9Fxsg+lPpPkdcyBSv/KFgpGdYkMfn+EI1Or2EHjpgLCA== } - pg-connection-string@2.7.0: resolution: { integrity: sha512-PI2W9mv53rXJQEOb8xNR8lH7Hr+EKa6oJa38zsK0S/ky2er16ios1wLKhZyxzD7jUReiWokc9WK5nxSnC7W1TA== } @@ -13639,11 +14248,6 @@ packages: peerDependencies: pg: '>=8.0' - pg-pool@3.6.2: - resolution: { integrity: sha512-Htjbg8BlwXqSBQ9V8Vjtc+vzf/6fVUuak/3/XXKA9oxZprwW3IMDQTGHP+KDmVL7rtd+R1QjbnCFPuTHm3G4hg== } - peerDependencies: - pg: '>=8.0' - pg-pool@3.7.1: resolution: { integrity: 
sha512-xIOsFoh7Vdhojas6q3596mXFsR8nwBQBXX5JiV7p9buEVAGqYL4yFzclON5P9vFrpu1u7Zwl2oriyDa89n0wbw== } peerDependencies: @@ -13652,9 +14256,6 @@ packages: pg-protocol@1.6.0: resolution: { integrity: sha512-M+PDm637OY5WM307051+bsDia5Xej6d9IR4GwJse1qA1DIhiKlksvrneZOYQq42OM+spubpcNYEo2FcKQrDk+Q== } - pg-protocol@1.6.1: - resolution: { integrity: sha512-jPIlvgoD63hrEuihvIg+tJhoGjUsLPn6poJY9N5CnlPd91c2T18T/9zBtLxZSb1EhYxBRoZJtzScCaWlYLtktg== } - pg-protocol@1.7.1: resolution: { integrity: sha512-gjTHWGYWsEgy9MsY0Gp6ZJxV24IjDqdpTW7Eh0x+WfJLFsm/TJx1MzL6T0D88mBvkpxotCQ6TwW6N+Kko7lhgQ== } @@ -14544,17 +15145,23 @@ packages: puppeteer@20.9.0: resolution: { integrity: sha512-kAglT4VZ9fWEGg3oLc4/de+JcONuEJhlh3J6f5R1TLkrY/EHHIHxWXDOzXvaxQCtedmyVXBwg8M+P8YCO/wZjw== } engines: { node: '>=16.3.0' } - deprecated: < 21.5.0 is no longer supported + deprecated: < 22.8.2 is no longer supported pure-color@1.3.0: resolution: { integrity: sha512-QFADYnsVoBMw1srW7OVKEYjG+MbIa49s54w1MA1EDY6r2r/sTcKKYqRX1f4GYvnXP7eN/Pe9HFcX+hwzmrXRHA== } + pure-rand@6.1.0: + resolution: { integrity: sha512-bVWawvoZoBYpp6yIoQtQXHZjmz35RSVHnUOTefl8Vcjr8snTPY1wnpSPMWekcFwbxI6gtmT7rSYPFvz71ldiOA== } + pyodide@0.25.0: resolution: { integrity: sha512-RagtX3TfV2M0QAfThG2SMvwE31ikqAFDUXc5/4xhppEoVf4VbL7L0kbKOIdSNg7MbVsHELVxftk66WvT926GpA== } q@1.5.1: resolution: { integrity: sha512-kV/CThkXo6xyFEZUugw/+pIOywXcDbFYgSct5cT3gqlbkBE1SJdwy6UQoZvodiWF/ckQLZyDE/Bu1M6gVu5lVw== } engines: { node: '>=0.6.0', teleport: '>=0.2.0' } + deprecated: |- + You or someone you depend on is using Q, the JavaScript Promise library that gave JavaScript developers strong feelings about promises. They can almost certainly migrate to the native JavaScript promise now. Thank you literally everyone for joining me in this bet against the odds. Be excellent to each other. 
+ (For a CapTP with native promises, see @endo/eventual-send and @endo/captp) qs@6.10.4: resolution: { integrity: sha512-OQiU+C+Ds5qiH91qh/mg0w+8nwQuLjM4F4M/PbmhDOoYehPh+Fb0bDjtR1sOvy7YKxvj28Y/M0PhP5uVX0kB+g== } @@ -14568,6 +15175,10 @@ packages: resolution: { integrity: sha512-tDNIz22aBzCDxLtVH++VnTfzxlfeK5CbqohpSqpJgj1Wg/cQbStNAz3NuqCs5vV+pjBsK4x4pN9HlVh7rcYRiA== } engines: { node: '>=0.6' } + qs@6.12.1: + resolution: { integrity: sha512-zWmv4RSuB9r2mYQw3zxQuHWeU+42aKi1wWig/j4ele4ygELZ7PEO6MM7rim9oAQH2A5MWfsAVf/jPvTPgCbvUQ== } + engines: { node: '>=0.6' } + qs@6.13.0: resolution: { integrity: sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg== } engines: { node: '>=0.6' } @@ -14604,6 +15215,10 @@ packages: rake-modified@1.0.8: resolution: { integrity: sha512-rj/1t+EyI8Ly52eaCeSy5hoNpdNnDlNQ/+jll2DypR6nkuxotMbaupzwbuMSaXzuSL1I2pYVYy7oPus/Ls49ag== } + random-bytes@1.0.0: + resolution: { integrity: sha512-iv7LhNVO047HzYR3InF6pUcUsPQiHTM1Qal51DcGSuZFBil1aBBWG5eHPNek7bvILMaYJ/8RU1e8w1AMdHmLQQ== } + engines: { node: '>= 0.8' } + randombytes@2.1.0: resolution: { integrity: sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ== } @@ -14808,6 +15423,12 @@ packages: typescript: optional: true + react-smooth@4.0.1: + resolution: { integrity: sha512-OE4hm7XqR0jNOq3Qmk9mFLyd6p2+j6bvbPJ7qlB7+oo0eNcL2l7WQzG6MBnT3EXY6xzkLMUBec3AfewJdA0J8w== } + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 + react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 + react-syntax-highlighter@15.5.0: resolution: { integrity: sha512-+zq2myprEnQmH5yw6Gqc8lD55QHnpKaU8TOcFeC/Lg/MQSs8UknEA0JC4nTZGFAXC2J2Hyj/ijJ7NlabyPi2gg== } peerDependencies: @@ -14858,6 +15479,7 @@ packages: read-package-json@6.0.4: resolution: { integrity: sha512-AEtWXYfopBj2z5N5PbkAOeNHRPUg5q+Nen7QLxV8M2zJq1ym6/lCz3fYNTCXe19puu2d06jfHhrP7v/S2PtMMw== } engines: { node: ^14.17.0 || ^16.13.0 || >=18.0.0 } + deprecated: This package is no longer supported. Please use @npmcli/package-json instead. 
read-pkg-up@1.0.1: resolution: { integrity: sha512-WD9MTlNtI55IwYUS27iHh9tK3YoIVhxis8yKhLpTqWtml739uXc9NWTpxoHkfZf3+DkCCsXox94/VWZniuZm6A== } @@ -14902,6 +15524,16 @@ packages: resolution: { integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA== } engines: { node: '>=8.10.0' } + recharts-scale@0.4.5: + resolution: { integrity: sha512-kivNFO+0OcUNu7jQquLXAxz1FIwZj8nrj+YkOKc5694NbjCvcT6aSZiIzNzd2Kul4o4rTto8QVR9lMNtxD4G1w== } + + recharts@2.12.7: + resolution: { integrity: sha512-hlLJMhPQfv4/3NBSAyq3gzGg4h2v69RJh6KU7b3pXYNNAELs9kEoXOjbkxdXpALqKBoVmVptGfLpxdaVYqjmXQ== } + engines: { node: '>=14' } + peerDependencies: + react: ^16.0.0 || ^17.0.0 || ^18.0.0 + react-dom: ^16.0.0 || ^17.0.0 || ^18.0.0 + rechoir@0.6.2: resolution: { integrity: sha512-HFM8rkZ+i3zrV+4LQjwQ0W+ez98pApMGM3HUrN04j3CqzPOzl9nmP15Y8YXNm8QHGv/eacOVEjqhmWpkRV0NAw== } engines: { node: '>= 0.10' } @@ -14931,14 +15563,26 @@ packages: redis@4.6.13: resolution: { integrity: sha512-MHgkS4B+sPjCXpf+HfdetBwbRz6vCtsceTmw1pHNYJAsYxrfpOP6dz+piJWGos8wqG7qb3vj/Rrc5qOlmInUuA== } + redis@4.7.0: + resolution: { integrity: sha512-zvmkHEAdGMn+hMRXuMBtu4Vo5P6rHQjLoHftu+lBqq8ZTA3RCVC/WzD790bkKKiNFp7d5/9PcSD19fJyyRvOdQ== } + + redux-thunk@3.1.0: + resolution: { integrity: sha512-NW2r5T6ksUKXCabzhL9z+h206HQw/NJkcLm1GPImRQ8IzfXwRGqjVhKJGauHirT0DAuyy6hjdnMZaRoAcy0Klw== } + peerDependencies: + redux: ^5.0.0 + redux@4.2.1: resolution: { integrity: sha512-LAUYz4lc+Do8/g7aeRa8JkyDErK6ekstQaqWQrNRW//MY1TvCEpMtpTWvlQ+FPbWCx+Xixu/6SHt5N0HR+SB4w== } + redux@5.0.1: + resolution: { integrity: sha512-M9/ELqF6fy8FwmkpnF0S3YKOqMyoWJ4+CS5Efg2ct3oY9daQvd/Pc71FpGZsVsbl3Cpb+IIcjBDUnnyBdQbq4w== } + reflect-metadata@0.1.14: resolution: { integrity: sha512-ZhYeb6nRaXCfhnndflDK8qI6ZQ/YcWZCISRAWICW9XYqMUwjZM9Z0DveWX/ABN01oxSHwVxKQmxeYZSsm0jh5A== } reflect-metadata@0.2.1: resolution: { integrity: sha512-i5lLI6iw9AU3Uu4szRNPPEkomnkjRTaVt9hy/bn5g/oSzekBSMeLZblcjP74AW0vBabqERLLIrz+gR8QYR54Tw== } + deprecated: This version has a critical bug in fallback handling. Please upgrade to reflect-metadata@0.2.2 or newer. 
reflect.getprototypeof@1.0.5: resolution: { integrity: sha512-62wgfC8dJWrmxv44CA36pLDnP6KKl3Vhxb7PL+8+qrrFMMoJij4vgiMP8zV4O8+CBMXY1mHxI5fITGHXFHVmQQ== } @@ -15065,6 +15709,9 @@ packages: reselect@4.1.8: resolution: { integrity: sha512-ab9EmR80F/zQTMNeneUr4cv+jSwPJgIlvEmVwLerwrWVbpLlBuls9XHzIeTFy4cegU2NHBp3va0LKOzU5qFEYQ== } + reselect@5.1.1: + resolution: { integrity: sha512-K/BG6eIky/SBpzfHZv/dd+9JBFiS4SWV7FIujVyJRux6e45+73RaUHXLmIR1f7WOMaQ0U1km6qwklRQxpJJY0w== } + resolve-alpn@1.2.1: resolution: { integrity: sha512-0a1F4l73/ZFZOakJnQ3FvkJ2+gSTQWz/r2KE5OdDY0TxPm5h4GkqkWWfM47T7HsbnOtcJVEF4epCVy6u7Q3K+g== } @@ -15108,6 +15755,10 @@ packages: resolution: { integrity: sha512-/NtpHNDN7jWhAaQ9BvBUYZ6YTXsRBgfqWFWP7BZBaoMJO/I3G5OFzvTuWNlZC3aPjins1F+TNrLKsGbH4rfsRQ== } engines: { node: '>=10' } + resolve.exports@2.0.3: + resolution: { integrity: sha512-OcXjMsGdhL4XnbShKpAcSqPMzQoYkYyhbEaeSko47MjRP9NfEQMhZkXL1DoFlt9LWQn4YttrdnV6X2OiyzBi+A== } + engines: { node: '>=10' } + resolve@1.22.8: resolution: { integrity: sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw== } hasBin: true @@ -15158,6 +15809,7 @@ packages: rimraf@3.0.2: resolution: { integrity: sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== } + deprecated: Rimraf versions prior to v4 are no longer supported hasBin: true rimraf@5.0.5: @@ -15598,6 +16250,9 @@ packages: source-map-support@0.4.18: resolution: { integrity: sha512-try0/JqxPLF9nOjvSta7tVondkP5dwgyLDjVoyMDlmjugT2lRZ1OfsrYTkCd2hkDnJTKRbO/Rl3orm8vlsUzbA== } + source-map-support@0.5.13: + resolution: { integrity: sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w== } + source-map-support@0.5.21: resolution: { integrity: sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w== } @@ -15914,6 +16569,10 @@ packages: resolution: { integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== } engines: { node: '>=8' } + stripe@15.12.0: + resolution: { integrity: sha512-slTbYS1WhRJXVB8YXU8fgHizkUrM9KJyrw4Dd8pLEwzKHYyQTIE46EePC2MVbSDZdE24o1GdNtzmJV4PrPpmJA== } + engines: { node: '>=12.*' } + stripe@17.3.1: resolution: { integrity: sha512-E9/u+GFBPkYnTmfFCoKX3+gP4R3SkZoGunHe4cw9J+sqkj5uxpLFf1LscuI9BuEyIQ0PFAgPTHavgQwRtOvnag== } engines: { node: '>=12.*' } @@ -15973,6 +16632,18 @@ packages: engines: { node: '>=16 || 14 >=14.17' } hasBin: true + superagent@9.0.2: + resolution: { integrity: sha512-xuW7dzkUpcJq7QnhOsnNUgtYp3xRwpt2F7abdRYIpCsAt0hhUqia0EdxyXZQQpNmGtsCzYHryaKSV3q3GJnq7w== } + engines: { node: '>=14.18.0' } + + supergateway@3.0.1: + resolution: { integrity: sha512-QCoN4/0tOmYaEf5F9rfohq8AFksO6W9d0KQey6oV2HFMotEyw2T2znsDA6xGWcMexaIsm294jbEsh6AAmG/vXQ== } + hasBin: true + + supertest@7.1.0: + resolution: { integrity: sha512-5QeSO8hSrKghtcWEoPiO036fxH0Ii2wVQfFZSP0oqQhmjk8bOLhDFXr4JrvaFmPuEWUoq4znY3uSi8UzLKxGqw== } + engines: { node: '>=14.18.0' } + supports-color@2.0.0: resolution: { integrity: sha512-KKNVtd6pCYgPIKU4cp2733HWYCpplQhddZLBUryaAHou723x+FRzQ5Df824Fj+IyyuiQTRoub4SnIFfIcrp70g== } engines: { node: '>=0.8.0' } @@ -16320,6 +16991,30 @@ packages: ts-interface-checker@0.1.13: resolution: { integrity: sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA== } + ts-jest@29.3.2: + resolution: { integrity: sha512-bJJkrWc6PjFVz5g2DGCNUo8z7oFEYaz1xP1NpeDU7KNLMWPpEyV8Chbpkn8xjzgRDpQhnGMyvyldoL7h8JXyug== } + engines: { node: ^14.15.0 || 
^16.10.0 || ^18.0.0 || >=20.0.0 } + hasBin: true + peerDependencies: + '@babel/core': '>=7.0.0-beta.0 <8' + '@jest/transform': ^29.0.0 + '@jest/types': ^29.0.0 + babel-jest: ^29.0.0 + esbuild: '*' + jest: ^29.0.0 + typescript: '>=4.3 <6' + peerDependenciesMeta: + '@babel/core': + optional: true + '@jest/transform': + optional: true + '@jest/types': + optional: true + babel-jest: + optional: true + esbuild: + optional: true + ts-node@10.9.2: resolution: { integrity: sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ== } hasBin: true @@ -16469,6 +17164,10 @@ packages: resolution: { integrity: sha512-5Y2/pp2wtJk8o08G0CMkuFPCO354FGwk/vbidxrdhRGZfd0tFnb4Qb8anp9XxXriwBgVPjdWbKpGl4J9lJY2jQ== } engines: { node: '>=16' } + type-fest@4.40.1: + resolution: { integrity: sha512-9YvLNnORDpI+vghLU/Nf+zSv0kL47KbVJ1o3sKgoTefl6i+zebxbiDQWoe/oWWqPhIgQdRZRT1KA9sCPL810SA== } + engines: { node: '>=16' } + type-is@1.6.18: resolution: { integrity: sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g== } engines: { node: '>= 0.6' } @@ -16566,6 +17265,9 @@ packages: typeorm-aurora-data-api-driver: optional: true + typescript-event-target@1.1.1: + resolution: { integrity: sha512-dFSOFBKV6uwaloBCCUhxlD3Pr/P1a/tJdcmPrTXCHlEFD3faj0mztjcGn6VBAhQ0/Bdy8K3VWrrqwbt/ffsYsg== } + typescript@5.5.2: resolution: { integrity: sha512-NcRtPEOsPFFWjobJEtfihkLCZCXZt/os3zf8nTxjVH3RvTSxjrCamJpbExGvYOF+tFHc3pA65qpdwPbzjohhew== } engines: { node: '>=14.17' } @@ -16580,6 +17282,18 @@ packages: uc.micro@2.1.0: resolution: { integrity: sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A== } + uglify-js@3.19.3: + resolution: { integrity: sha512-v3Xu+yuwBXisp6QYTcH4UbH+xYJXqnq2m/LtQVWKWzYc1iehYnLixoQDN9FH6/j9/oybfd6W9Ghwkl8+UMKTKQ== } + engines: { node: '>=0.8.0' } + hasBin: true + + uid-safe@2.1.5: + resolution: { integrity: sha512-KPHm4VL5dDXKz01UuEd88Df+KzynaohSL9fBh096KWAxSKZQDI2uBrVqtvRM4rwrIrRRKsdLNML/lnaaVSRioA== } + engines: { node: '>= 0.8' } + + uid2@0.0.4: + resolution: { integrity: sha512-IevTus0SbGwQzYh3+fRsAMTVVPOoIVufzacXcHPmdlle1jUpq7BRL+mw3dgeLanvGZdwwbWhRV6XrcFNdBmjWA== } + unbox-primitive@1.0.2: resolution: { integrity: sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw== } @@ -16613,8 +17327,8 @@ packages: undici-types@5.26.5: resolution: { integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA== } - undici-types@6.20.0: - resolution: { integrity: sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg== } + undici-types@6.19.8: + resolution: { integrity: sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw== } undici@5.28.3: resolution: { integrity: sha512-3ItfzbrhDlINjaP0duwnNsKpDQk3acHI3gVJ1z4fmwMK31k5G9OVIAMLSIaP6w4FaGkaAkN6zaQO9LUvZ1t7VA== } @@ -16869,6 +17583,10 @@ packages: resolution: { integrity: sha512-FGtKtv3xIpR6BYhvgH8MI/y78oT7d8Au3ww4QIxymrCtZEh5b8gCw2siywE+puhEmuWKDtmfrvF5UlB298ut3w== } engines: { node: '>=10.12.0' } + v8-to-istanbul@9.3.0: + resolution: { integrity: sha512-kiGUalWN+rgBJ/1OHZsBtU4rXZOfj/7rKQxULKlIzwzQSvMJUUNgPwJEEh7gU6xEVxC0ahoOBvN2YI8GH6FNgA== } + engines: { node: '>=10.12.0' } + v8flags@3.2.0: resolution: { integrity: sha512-mH8etigqMfiGWdeXpaaqGfs6BndypxusHHcv2qSHyZkGEznCd/qAXCWWRzeowtL54147cktFOC4P5y+kl8d8Jg== } engines: { node: '>= 0.10' } @@ -16918,6 +17636,9 @@ packages: vfile@6.0.1: 
resolution: { integrity: sha512-1bYqc7pt6NIADBJ98UiG0Bn/CHIVOoZ/IyEkqIruLg0mE1BKzkOXY2D6CSqQIcKqgadppE5lrxgWXJmXd7zZJw== } + victory-vendor@36.9.2: + resolution: { integrity: sha512-PnpQQMuxlwYdocC8fIJqVXvkeViHYzotI+NJrCuav0ZYFoq912ZHBk3mCeuj+5/VpodOjPe1z0Fk2ihgzlXqjQ== } + vinyl-file@3.0.0: resolution: { integrity: sha512-BoJDj+ca3D9xOuPEM6RWVtWQtvEPQiQYn82LvdxhLWplfQsBzBqtgK0yhCP0s1BNTi6dH9BO+dzybvyQIacifg== } engines: { node: '>=4' } @@ -17253,6 +17974,12 @@ packages: wink-nlp@2.3.0: resolution: { integrity: sha512-NcMmlsJavRZgaV4dAjsOQPuXG4v3yLRRssEibfx41lhmwTTOCaQGW7czNC73bDKCq7q4vqGTjX3/MFhK3I76TA== } + winston-daily-rotate-file@5.0.0: + resolution: { integrity: sha512-JDjiXXkM5qvwY06733vf09I2wnMXpZEhxEVOSPenZMii+g7pcDcTBt2MRugnoi8BwVSuCT2jfRXBUy+n1Zz/Yw== } + engines: { node: '>=8' } + peerDependencies: + winston: ^3 + winston-transport@4.7.0: resolution: { integrity: sha512-ajBj65K5I7denzer2IYW6+2bNIVqLGDHqDw3Ow8Ohh+vdW+rv4MZ6eiDvHoKhfJFZ2auyN8byXieDDJ96ViONg== } engines: { node: '>= 12.0.0' } @@ -17309,6 +18036,7 @@ packages: workbox-google-analytics@6.6.0: resolution: { integrity: sha512-p4DJa6OldXWd6M9zRl0H6vB9lkrmqYFkRQ2xEiNdBFp9U0LhsGO7hsBscVEyH9H2/3eZZt8c97NB2FD9U2NJ+Q== } + deprecated: It is not compatible with newer versions of GA starting with v4, as long as you are using GAv3 it should be ok, but the package is not longer being maintained workbox-google-analytics@7.0.0: resolution: { integrity: sha512-MEYM1JTn/qiC3DbpvP2BVhyIH+dV/5BjHk756u9VbwuAhu0QHyKscTnisQuz21lfRpOwiS9z4XdqeVAKol0bzg== } @@ -17435,6 +18163,18 @@ packages: utf-8-validate: optional: true + ws@8.18.2: + resolution: { integrity: sha512-DMricUmwGZUVr++AEAe2uiVM7UoO9MAVZMDu05UQOaUII0lp+zOzLLU4Xqh/JvTqklB1T4uELaaPBKyjE1r4fQ== } + engines: { node: '>=10.0.0' } + peerDependencies: + bufferutil: ^4.0.1 + utf-8-validate: '>=5.0.2' + peerDependenciesMeta: + bufferutil: + optional: true + utf-8-validate: + optional: true + xdg-default-browser@2.1.0: resolution: { integrity: sha512-HY4G725+IDQr16N8XOjAms5qJGArdJaWIuC7Q7A8UXIwj2mifqnPXephazyL7sIkQPvmEoPX3E0v2yFv6hQUNg== } engines: { node: '>=4' } @@ -17597,6 +18337,9 @@ packages: zod@3.24.2: resolution: { integrity: sha512-lY7CDW43ECgW9u1TcT3IoXHflywfVqDYze4waEz812jR/bZ8FHDsl7pFQoSZTz5N+2NqRXs8GBwnAwo3ZNxqhQ== } + zod@3.25.32: + resolution: { integrity: sha512-OSm2xTIRfW8CV5/QKgngwmQW/8aPfGdaQFlrGoErlgg/Epm7cjb6K6VEyExfe65a3VybUOnu381edLb0dfJl0g== } + zustand@4.5.2: resolution: { integrity: sha512-2cN1tPkDVkwCy5ickKrI7vijSjPksFRfqS6237NzT0vqSsztTNnQdHw9mmN7uBdk3gceVXU0a+21jFzFzAc9+g== } engines: { node: '>=12.7.0' } @@ -17700,7 +18443,7 @@ snapshots: '@anthropic-ai/sdk@0.20.9(encoding@0.1.13)': dependencies: '@types/node': 18.19.23 - '@types/node-fetch': 2.6.11 + '@types/node-fetch': 2.6.12 abort-controller: 3.0.0 agentkeepalive: 4.5.0 form-data-encoder: 1.7.2 @@ -17821,7 +18564,7 @@ snapshots: '@aws-crypto/ie11-detection': 3.0.0 '@aws-crypto/supports-web-crypto': 3.0.0 '@aws-crypto/util': 3.0.0 - '@aws-sdk/types': 3.523.0 + '@aws-sdk/types': 3.609.0 '@aws-sdk/util-locate-window': 3.495.0 '@aws-sdk/util-utf8-browser': 3.259.0 tslib: 1.14.1 @@ -17832,7 +18575,7 @@ snapshots: '@aws-crypto/sha256-js': 3.0.0 '@aws-crypto/supports-web-crypto': 3.0.0 '@aws-crypto/util': 3.0.0 - '@aws-sdk/types': 3.523.0 + '@aws-sdk/types': 3.609.0 '@aws-sdk/util-locate-window': 3.495.0 '@aws-sdk/util-utf8-browser': 3.259.0 tslib: 1.14.1 @@ -17850,7 +18593,7 @@ snapshots: '@aws-crypto/sha256-js@3.0.0': dependencies: '@aws-crypto/util': 3.0.0 - '@aws-sdk/types': 3.523.0 + '@aws-sdk/types': 
3.609.0 tslib: 1.14.1 '@aws-crypto/sha256-js@5.2.0': @@ -18178,47 +18921,47 @@ snapshots: transitivePeerDependencies: - aws-crt - '@aws-sdk/client-secrets-manager@3.723.0': + '@aws-sdk/client-secrets-manager@3.726.1': dependencies: '@aws-crypto/sha256-browser': 5.2.0 '@aws-crypto/sha256-js': 5.2.0 - '@aws-sdk/client-sso-oidc': 3.723.0(@aws-sdk/client-sts@3.723.0) - '@aws-sdk/client-sts': 3.723.0 + '@aws-sdk/client-sso-oidc': 3.726.0(@aws-sdk/client-sts@3.726.1) + '@aws-sdk/client-sts': 3.726.1 '@aws-sdk/core': 3.723.0 - '@aws-sdk/credential-provider-node': 3.723.0(@aws-sdk/client-sso-oidc@3.723.0(@aws-sdk/client-sts@3.723.0))(@aws-sdk/client-sts@3.723.0) + '@aws-sdk/credential-provider-node': 3.726.0(@aws-sdk/client-sso-oidc@3.726.0(@aws-sdk/client-sts@3.726.1))(@aws-sdk/client-sts@3.726.1) '@aws-sdk/middleware-host-header': 3.723.0 '@aws-sdk/middleware-logger': 3.723.0 '@aws-sdk/middleware-recursion-detection': 3.723.0 - '@aws-sdk/middleware-user-agent': 3.723.0 + '@aws-sdk/middleware-user-agent': 3.726.0 '@aws-sdk/region-config-resolver': 3.723.0 '@aws-sdk/types': 3.723.0 - '@aws-sdk/util-endpoints': 3.723.0 + '@aws-sdk/util-endpoints': 3.726.0 '@aws-sdk/util-user-agent-browser': 3.723.0 - '@aws-sdk/util-user-agent-node': 3.723.0 - '@smithy/config-resolver': 4.0.0 - '@smithy/core': 3.0.0 - '@smithy/fetch-http-handler': 5.0.0 - '@smithy/hash-node': 4.0.0 - '@smithy/invalid-dependency': 4.0.0 - '@smithy/middleware-content-length': 4.0.0 - '@smithy/middleware-endpoint': 4.0.0 - '@smithy/middleware-retry': 4.0.0 - '@smithy/middleware-serde': 4.0.0 - '@smithy/middleware-stack': 4.0.0 - '@smithy/node-config-provider': 4.0.0 - '@smithy/node-http-handler': 4.0.0 - '@smithy/protocol-http': 5.0.0 - '@smithy/smithy-client': 4.0.0 - '@smithy/types': 4.0.0 - '@smithy/url-parser': 4.0.0 + '@aws-sdk/util-user-agent-node': 3.726.0 + '@smithy/config-resolver': 4.0.1 + '@smithy/core': 3.1.0 + '@smithy/fetch-http-handler': 5.0.1 + '@smithy/hash-node': 4.0.1 + '@smithy/invalid-dependency': 4.0.1 + '@smithy/middleware-content-length': 4.0.1 + '@smithy/middleware-endpoint': 4.0.1 + '@smithy/middleware-retry': 4.0.2 + '@smithy/middleware-serde': 4.0.1 + '@smithy/middleware-stack': 4.0.1 + '@smithy/node-config-provider': 4.0.1 + '@smithy/node-http-handler': 4.0.1 + '@smithy/protocol-http': 5.0.1 + '@smithy/smithy-client': 4.1.1 + '@smithy/types': 4.1.0 + '@smithy/url-parser': 4.0.1 '@smithy/util-base64': 4.0.0 '@smithy/util-body-length-browser': 4.0.0 '@smithy/util-body-length-node': 4.0.0 - '@smithy/util-defaults-mode-browser': 4.0.0 - '@smithy/util-defaults-mode-node': 4.0.0 - '@smithy/util-endpoints': 3.0.0 - '@smithy/util-middleware': 4.0.0 - '@smithy/util-retry': 4.0.0 + '@smithy/util-defaults-mode-browser': 4.0.2 + '@smithy/util-defaults-mode-node': 4.0.2 + '@smithy/util-endpoints': 3.0.1 + '@smithy/util-middleware': 4.0.1 + '@smithy/util-retry': 4.0.1 '@smithy/util-utf8': 4.0.0 '@types/uuid': 9.0.8 tslib: 2.6.2 @@ -18271,46 +19014,46 @@ snapshots: transitivePeerDependencies: - aws-crt - '@aws-sdk/client-sso-oidc@3.723.0(@aws-sdk/client-sts@3.723.0)': + '@aws-sdk/client-sso-oidc@3.726.0(@aws-sdk/client-sts@3.726.1)': dependencies: '@aws-crypto/sha256-browser': 5.2.0 '@aws-crypto/sha256-js': 5.2.0 - '@aws-sdk/client-sts': 3.723.0 + '@aws-sdk/client-sts': 3.726.1 '@aws-sdk/core': 3.723.0 - '@aws-sdk/credential-provider-node': 3.723.0(@aws-sdk/client-sso-oidc@3.723.0(@aws-sdk/client-sts@3.723.0))(@aws-sdk/client-sts@3.723.0) + '@aws-sdk/credential-provider-node': 
3.726.0(@aws-sdk/client-sso-oidc@3.726.0(@aws-sdk/client-sts@3.726.1))(@aws-sdk/client-sts@3.726.1) '@aws-sdk/middleware-host-header': 3.723.0 '@aws-sdk/middleware-logger': 3.723.0 '@aws-sdk/middleware-recursion-detection': 3.723.0 - '@aws-sdk/middleware-user-agent': 3.723.0 + '@aws-sdk/middleware-user-agent': 3.726.0 '@aws-sdk/region-config-resolver': 3.723.0 '@aws-sdk/types': 3.723.0 - '@aws-sdk/util-endpoints': 3.723.0 + '@aws-sdk/util-endpoints': 3.726.0 '@aws-sdk/util-user-agent-browser': 3.723.0 - '@aws-sdk/util-user-agent-node': 3.723.0 - '@smithy/config-resolver': 4.0.0 - '@smithy/core': 3.0.0 - '@smithy/fetch-http-handler': 5.0.0 - '@smithy/hash-node': 4.0.0 - '@smithy/invalid-dependency': 4.0.0 - '@smithy/middleware-content-length': 4.0.0 - '@smithy/middleware-endpoint': 4.0.0 - '@smithy/middleware-retry': 4.0.0 - '@smithy/middleware-serde': 4.0.0 - '@smithy/middleware-stack': 4.0.0 - '@smithy/node-config-provider': 4.0.0 - '@smithy/node-http-handler': 4.0.0 - '@smithy/protocol-http': 5.0.0 - '@smithy/smithy-client': 4.0.0 - '@smithy/types': 4.0.0 - '@smithy/url-parser': 4.0.0 + '@aws-sdk/util-user-agent-node': 3.726.0 + '@smithy/config-resolver': 4.0.1 + '@smithy/core': 3.1.0 + '@smithy/fetch-http-handler': 5.0.1 + '@smithy/hash-node': 4.0.1 + '@smithy/invalid-dependency': 4.0.1 + '@smithy/middleware-content-length': 4.0.1 + '@smithy/middleware-endpoint': 4.0.1 + '@smithy/middleware-retry': 4.0.2 + '@smithy/middleware-serde': 4.0.1 + '@smithy/middleware-stack': 4.0.1 + '@smithy/node-config-provider': 4.0.1 + '@smithy/node-http-handler': 4.0.1 + '@smithy/protocol-http': 5.0.1 + '@smithy/smithy-client': 4.1.1 + '@smithy/types': 4.1.0 + '@smithy/url-parser': 4.0.1 '@smithy/util-base64': 4.0.0 '@smithy/util-body-length-browser': 4.0.0 '@smithy/util-body-length-node': 4.0.0 - '@smithy/util-defaults-mode-browser': 4.0.0 - '@smithy/util-defaults-mode-node': 4.0.0 - '@smithy/util-endpoints': 3.0.0 - '@smithy/util-middleware': 4.0.0 - '@smithy/util-retry': 4.0.0 + '@smithy/util-defaults-mode-browser': 4.0.2 + '@smithy/util-defaults-mode-node': 4.0.2 + '@smithy/util-endpoints': 3.0.1 + '@smithy/util-middleware': 4.0.1 + '@smithy/util-retry': 4.0.1 '@smithy/util-utf8': 4.0.0 tslib: 2.6.2 transitivePeerDependencies: @@ -18398,7 +19141,7 @@ snapshots: transitivePeerDependencies: - aws-crt - '@aws-sdk/client-sso@3.723.0': + '@aws-sdk/client-sso@3.726.0': dependencies: '@aws-crypto/sha256-browser': 5.2.0 '@aws-crypto/sha256-js': 5.2.0 @@ -18406,36 +19149,36 @@ snapshots: '@aws-sdk/middleware-host-header': 3.723.0 '@aws-sdk/middleware-logger': 3.723.0 '@aws-sdk/middleware-recursion-detection': 3.723.0 - '@aws-sdk/middleware-user-agent': 3.723.0 + '@aws-sdk/middleware-user-agent': 3.726.0 '@aws-sdk/region-config-resolver': 3.723.0 '@aws-sdk/types': 3.723.0 - '@aws-sdk/util-endpoints': 3.723.0 + '@aws-sdk/util-endpoints': 3.726.0 '@aws-sdk/util-user-agent-browser': 3.723.0 - '@aws-sdk/util-user-agent-node': 3.723.0 - '@smithy/config-resolver': 4.0.0 - '@smithy/core': 3.0.0 - '@smithy/fetch-http-handler': 5.0.0 - '@smithy/hash-node': 4.0.0 - '@smithy/invalid-dependency': 4.0.0 - '@smithy/middleware-content-length': 4.0.0 - '@smithy/middleware-endpoint': 4.0.0 - '@smithy/middleware-retry': 4.0.0 - '@smithy/middleware-serde': 4.0.0 - '@smithy/middleware-stack': 4.0.0 - '@smithy/node-config-provider': 4.0.0 - '@smithy/node-http-handler': 4.0.0 - '@smithy/protocol-http': 5.0.0 - '@smithy/smithy-client': 4.0.0 - '@smithy/types': 4.0.0 - '@smithy/url-parser': 4.0.0 + '@aws-sdk/util-user-agent-node': 
3.726.0 + '@smithy/config-resolver': 4.0.1 + '@smithy/core': 3.1.0 + '@smithy/fetch-http-handler': 5.0.1 + '@smithy/hash-node': 4.0.1 + '@smithy/invalid-dependency': 4.0.1 + '@smithy/middleware-content-length': 4.0.1 + '@smithy/middleware-endpoint': 4.0.1 + '@smithy/middleware-retry': 4.0.2 + '@smithy/middleware-serde': 4.0.1 + '@smithy/middleware-stack': 4.0.1 + '@smithy/node-config-provider': 4.0.1 + '@smithy/node-http-handler': 4.0.1 + '@smithy/protocol-http': 5.0.1 + '@smithy/smithy-client': 4.1.1 + '@smithy/types': 4.1.0 + '@smithy/url-parser': 4.0.1 '@smithy/util-base64': 4.0.0 '@smithy/util-body-length-browser': 4.0.0 '@smithy/util-body-length-node': 4.0.0 - '@smithy/util-defaults-mode-browser': 4.0.0 - '@smithy/util-defaults-mode-node': 4.0.0 - '@smithy/util-endpoints': 3.0.0 - '@smithy/util-middleware': 4.0.0 - '@smithy/util-retry': 4.0.0 + '@smithy/util-defaults-mode-browser': 4.0.2 + '@smithy/util-defaults-mode-node': 4.0.2 + '@smithy/util-endpoints': 3.0.1 + '@smithy/util-middleware': 4.0.1 + '@smithy/util-retry': 4.0.1 '@smithy/util-utf8': 4.0.0 tslib: 2.6.2 transitivePeerDependencies: @@ -18571,46 +19314,46 @@ snapshots: transitivePeerDependencies: - aws-crt - '@aws-sdk/client-sts@3.723.0': + '@aws-sdk/client-sts@3.726.1': dependencies: '@aws-crypto/sha256-browser': 5.2.0 '@aws-crypto/sha256-js': 5.2.0 - '@aws-sdk/client-sso-oidc': 3.723.0(@aws-sdk/client-sts@3.723.0) + '@aws-sdk/client-sso-oidc': 3.726.0(@aws-sdk/client-sts@3.726.1) '@aws-sdk/core': 3.723.0 - '@aws-sdk/credential-provider-node': 3.723.0(@aws-sdk/client-sso-oidc@3.723.0(@aws-sdk/client-sts@3.723.0))(@aws-sdk/client-sts@3.723.0) + '@aws-sdk/credential-provider-node': 3.726.0(@aws-sdk/client-sso-oidc@3.726.0(@aws-sdk/client-sts@3.726.1))(@aws-sdk/client-sts@3.726.1) '@aws-sdk/middleware-host-header': 3.723.0 '@aws-sdk/middleware-logger': 3.723.0 '@aws-sdk/middleware-recursion-detection': 3.723.0 - '@aws-sdk/middleware-user-agent': 3.723.0 + '@aws-sdk/middleware-user-agent': 3.726.0 '@aws-sdk/region-config-resolver': 3.723.0 '@aws-sdk/types': 3.723.0 - '@aws-sdk/util-endpoints': 3.723.0 + '@aws-sdk/util-endpoints': 3.726.0 '@aws-sdk/util-user-agent-browser': 3.723.0 - '@aws-sdk/util-user-agent-node': 3.723.0 - '@smithy/config-resolver': 4.0.0 - '@smithy/core': 3.0.0 - '@smithy/fetch-http-handler': 5.0.0 - '@smithy/hash-node': 4.0.0 - '@smithy/invalid-dependency': 4.0.0 - '@smithy/middleware-content-length': 4.0.0 - '@smithy/middleware-endpoint': 4.0.0 - '@smithy/middleware-retry': 4.0.0 - '@smithy/middleware-serde': 4.0.0 - '@smithy/middleware-stack': 4.0.0 - '@smithy/node-config-provider': 4.0.0 - '@smithy/node-http-handler': 4.0.0 - '@smithy/protocol-http': 5.0.0 - '@smithy/smithy-client': 4.0.0 - '@smithy/types': 4.0.0 - '@smithy/url-parser': 4.0.0 + '@aws-sdk/util-user-agent-node': 3.726.0 + '@smithy/config-resolver': 4.0.1 + '@smithy/core': 3.1.0 + '@smithy/fetch-http-handler': 5.0.1 + '@smithy/hash-node': 4.0.1 + '@smithy/invalid-dependency': 4.0.1 + '@smithy/middleware-content-length': 4.0.1 + '@smithy/middleware-endpoint': 4.0.1 + '@smithy/middleware-retry': 4.0.2 + '@smithy/middleware-serde': 4.0.1 + '@smithy/middleware-stack': 4.0.1 + '@smithy/node-config-provider': 4.0.1 + '@smithy/node-http-handler': 4.0.1 + '@smithy/protocol-http': 5.0.1 + '@smithy/smithy-client': 4.1.1 + '@smithy/types': 4.1.0 + '@smithy/url-parser': 4.0.1 '@smithy/util-base64': 4.0.0 '@smithy/util-body-length-browser': 4.0.0 '@smithy/util-body-length-node': 4.0.0 - '@smithy/util-defaults-mode-browser': 4.0.0 - 
'@smithy/util-defaults-mode-node': 4.0.0 - '@smithy/util-endpoints': 3.0.0 - '@smithy/util-middleware': 4.0.0 - '@smithy/util-retry': 4.0.0 + '@smithy/util-defaults-mode-browser': 4.0.2 + '@smithy/util-defaults-mode-node': 4.0.2 + '@smithy/util-endpoints': 3.0.1 + '@smithy/util-middleware': 4.0.1 + '@smithy/util-retry': 4.0.1 '@smithy/util-utf8': 4.0.0 tslib: 2.6.2 transitivePeerDependencies: @@ -18629,14 +19372,14 @@ snapshots: '@aws-sdk/core@3.723.0': dependencies: '@aws-sdk/types': 3.723.0 - '@smithy/core': 3.0.0 - '@smithy/node-config-provider': 4.0.0 - '@smithy/property-provider': 4.0.0 - '@smithy/protocol-http': 5.0.0 - '@smithy/signature-v4': 5.0.0 - '@smithy/smithy-client': 4.0.0 - '@smithy/types': 4.0.0 - '@smithy/util-middleware': 4.0.0 + '@smithy/core': 3.1.0 + '@smithy/node-config-provider': 4.0.1 + '@smithy/property-provider': 4.0.1 + '@smithy/protocol-http': 5.0.1 + '@smithy/signature-v4': 5.0.1 + '@smithy/smithy-client': 4.1.1 + '@smithy/types': 4.1.0 + '@smithy/util-middleware': 4.0.1 fast-xml-parser: 4.4.1 tslib: 2.6.2 @@ -18672,8 +19415,8 @@ snapshots: dependencies: '@aws-sdk/core': 3.723.0 '@aws-sdk/types': 3.723.0 - '@smithy/property-provider': 4.0.0 - '@smithy/types': 4.0.0 + '@smithy/property-provider': 4.0.1 + '@smithy/types': 4.1.0 tslib: 2.6.2 '@aws-sdk/credential-provider-env@3.750.0': @@ -18700,13 +19443,13 @@ snapshots: dependencies: '@aws-sdk/core': 3.723.0 '@aws-sdk/types': 3.723.0 - '@smithy/fetch-http-handler': 5.0.0 - '@smithy/node-http-handler': 4.0.0 - '@smithy/property-provider': 4.0.0 - '@smithy/protocol-http': 5.0.0 - '@smithy/smithy-client': 4.0.0 - '@smithy/types': 4.0.0 - '@smithy/util-stream': 4.0.0 + '@smithy/fetch-http-handler': 5.0.1 + '@smithy/node-http-handler': 4.0.1 + '@smithy/property-provider': 4.0.1 + '@smithy/protocol-http': 5.0.1 + '@smithy/smithy-client': 4.1.1 + '@smithy/types': 4.1.0 + '@smithy/util-stream': 4.0.1 tslib: 2.6.2 '@aws-sdk/credential-provider-http@3.750.0': @@ -18754,20 +19497,20 @@ snapshots: - '@aws-sdk/credential-provider-node' - aws-crt - '@aws-sdk/credential-provider-ini@3.723.0(@aws-sdk/client-sso-oidc@3.723.0(@aws-sdk/client-sts@3.723.0))(@aws-sdk/client-sts@3.723.0)': + '@aws-sdk/credential-provider-ini@3.726.0(@aws-sdk/client-sso-oidc@3.726.0(@aws-sdk/client-sts@3.726.1))(@aws-sdk/client-sts@3.726.1)': dependencies: - '@aws-sdk/client-sts': 3.723.0 + '@aws-sdk/client-sts': 3.726.1 '@aws-sdk/core': 3.723.0 '@aws-sdk/credential-provider-env': 3.723.0 '@aws-sdk/credential-provider-http': 3.723.0 '@aws-sdk/credential-provider-process': 3.723.0 - '@aws-sdk/credential-provider-sso': 3.723.0(@aws-sdk/client-sso-oidc@3.723.0(@aws-sdk/client-sts@3.723.0)) - '@aws-sdk/credential-provider-web-identity': 3.723.0(@aws-sdk/client-sts@3.723.0) + '@aws-sdk/credential-provider-sso': 3.726.0(@aws-sdk/client-sso-oidc@3.726.0(@aws-sdk/client-sts@3.726.1)) + '@aws-sdk/credential-provider-web-identity': 3.723.0(@aws-sdk/client-sts@3.726.1) '@aws-sdk/types': 3.723.0 - '@smithy/credential-provider-imds': 4.0.0 - '@smithy/property-provider': 4.0.0 - '@smithy/shared-ini-file-loader': 4.0.0 - '@smithy/types': 4.0.0 + '@smithy/credential-provider-imds': 4.0.1 + '@smithy/property-provider': 4.0.1 + '@smithy/shared-ini-file-loader': 4.0.1 + '@smithy/types': 4.1.0 tslib: 2.6.2 transitivePeerDependencies: - '@aws-sdk/client-sso-oidc' @@ -18824,19 +19567,19 @@ snapshots: transitivePeerDependencies: - aws-crt - 
'@aws-sdk/credential-provider-node@3.723.0(@aws-sdk/client-sso-oidc@3.723.0(@aws-sdk/client-sts@3.723.0))(@aws-sdk/client-sts@3.723.0)': + '@aws-sdk/credential-provider-node@3.726.0(@aws-sdk/client-sso-oidc@3.726.0(@aws-sdk/client-sts@3.726.1))(@aws-sdk/client-sts@3.726.1)': dependencies: '@aws-sdk/credential-provider-env': 3.723.0 '@aws-sdk/credential-provider-http': 3.723.0 - '@aws-sdk/credential-provider-ini': 3.723.0(@aws-sdk/client-sso-oidc@3.723.0(@aws-sdk/client-sts@3.723.0))(@aws-sdk/client-sts@3.723.0) + '@aws-sdk/credential-provider-ini': 3.726.0(@aws-sdk/client-sso-oidc@3.726.0(@aws-sdk/client-sts@3.726.1))(@aws-sdk/client-sts@3.726.1) '@aws-sdk/credential-provider-process': 3.723.0 - '@aws-sdk/credential-provider-sso': 3.723.0(@aws-sdk/client-sso-oidc@3.723.0(@aws-sdk/client-sts@3.723.0)) - '@aws-sdk/credential-provider-web-identity': 3.723.0(@aws-sdk/client-sts@3.723.0) + '@aws-sdk/credential-provider-sso': 3.726.0(@aws-sdk/client-sso-oidc@3.726.0(@aws-sdk/client-sts@3.726.1)) + '@aws-sdk/credential-provider-web-identity': 3.723.0(@aws-sdk/client-sts@3.726.1) '@aws-sdk/types': 3.723.0 - '@smithy/credential-provider-imds': 4.0.0 - '@smithy/property-provider': 4.0.0 - '@smithy/shared-ini-file-loader': 4.0.0 - '@smithy/types': 4.0.0 + '@smithy/credential-provider-imds': 4.0.1 + '@smithy/property-provider': 4.0.1 + '@smithy/shared-ini-file-loader': 4.0.1 + '@smithy/types': 4.1.0 tslib: 2.6.2 transitivePeerDependencies: - '@aws-sdk/client-sso-oidc' @@ -18880,9 +19623,9 @@ snapshots: dependencies: '@aws-sdk/core': 3.723.0 '@aws-sdk/types': 3.723.0 - '@smithy/property-provider': 4.0.0 - '@smithy/shared-ini-file-loader': 4.0.0 - '@smithy/types': 4.0.0 + '@smithy/property-provider': 4.0.1 + '@smithy/shared-ini-file-loader': 4.0.1 + '@smithy/types': 4.1.0 tslib: 2.6.2 '@aws-sdk/credential-provider-process@3.750.0': @@ -18919,15 +19662,15 @@ snapshots: - '@aws-sdk/credential-provider-node' - aws-crt - '@aws-sdk/credential-provider-sso@3.723.0(@aws-sdk/client-sso-oidc@3.723.0(@aws-sdk/client-sts@3.723.0))': + '@aws-sdk/credential-provider-sso@3.726.0(@aws-sdk/client-sso-oidc@3.726.0(@aws-sdk/client-sts@3.726.1))': dependencies: - '@aws-sdk/client-sso': 3.723.0 + '@aws-sdk/client-sso': 3.726.0 '@aws-sdk/core': 3.723.0 - '@aws-sdk/token-providers': 3.723.0(@aws-sdk/client-sso-oidc@3.723.0(@aws-sdk/client-sts@3.723.0)) + '@aws-sdk/token-providers': 3.723.0(@aws-sdk/client-sso-oidc@3.726.0(@aws-sdk/client-sts@3.726.1)) '@aws-sdk/types': 3.723.0 - '@smithy/property-provider': 4.0.0 - '@smithy/shared-ini-file-loader': 4.0.0 - '@smithy/types': 4.0.0 + '@smithy/property-provider': 4.0.1 + '@smithy/shared-ini-file-loader': 4.0.1 + '@smithy/types': 4.1.0 tslib: 2.6.2 transitivePeerDependencies: - '@aws-sdk/client-sso-oidc' @@ -18964,13 +19707,13 @@ snapshots: - '@aws-sdk/credential-provider-node' - aws-crt - '@aws-sdk/credential-provider-web-identity@3.723.0(@aws-sdk/client-sts@3.723.0)': + '@aws-sdk/credential-provider-web-identity@3.723.0(@aws-sdk/client-sts@3.726.1)': dependencies: - '@aws-sdk/client-sts': 3.723.0 + '@aws-sdk/client-sts': 3.726.1 '@aws-sdk/core': 3.723.0 '@aws-sdk/types': 3.723.0 - '@smithy/property-provider': 4.0.0 - '@smithy/types': 4.0.0 + '@smithy/property-provider': 4.0.1 + '@smithy/types': 4.1.0 tslib: 2.6.2 '@aws-sdk/credential-provider-web-identity@3.750.0': @@ -18992,9 +19735,9 @@ snapshots: '@aws-sdk/lib-storage@3.726.1(@aws-sdk/client-s3@3.529.1)': dependencies: '@aws-sdk/client-s3': 3.529.1 - '@smithy/abort-controller': 4.0.0 - '@smithy/middleware-endpoint': 4.0.0 - 
'@smithy/smithy-client': 4.0.0 + '@smithy/abort-controller': 4.0.1 + '@smithy/middleware-endpoint': 4.0.1 + '@smithy/smithy-client': 4.1.1 buffer: 5.6.0 events: 3.3.0 stream-browserify: 3.0.0 @@ -19054,8 +19797,8 @@ snapshots: '@aws-sdk/middleware-host-header@3.723.0': dependencies: '@aws-sdk/types': 3.723.0 - '@smithy/protocol-http': 5.0.0 - '@smithy/types': 4.0.0 + '@smithy/protocol-http': 5.0.1 + '@smithy/types': 4.1.0 tslib: 2.6.2 '@aws-sdk/middleware-host-header@3.734.0': @@ -19086,7 +19829,7 @@ snapshots: '@aws-sdk/middleware-logger@3.723.0': dependencies: '@aws-sdk/types': 3.723.0 - '@smithy/types': 4.0.0 + '@smithy/types': 4.1.0 tslib: 2.6.2 '@aws-sdk/middleware-logger@3.734.0': @@ -19112,8 +19855,8 @@ snapshots: '@aws-sdk/middleware-recursion-detection@3.723.0': dependencies: '@aws-sdk/types': 3.723.0 - '@smithy/protocol-http': 5.0.0 - '@smithy/types': 4.0.0 + '@smithy/protocol-http': 5.0.1 + '@smithy/types': 4.1.0 tslib: 2.6.2 '@aws-sdk/middleware-recursion-detection@3.734.0': @@ -19184,14 +19927,14 @@ snapshots: '@smithy/types': 2.11.0 tslib: 2.6.2 - '@aws-sdk/middleware-user-agent@3.723.0': + '@aws-sdk/middleware-user-agent@3.726.0': dependencies: '@aws-sdk/core': 3.723.0 '@aws-sdk/types': 3.723.0 - '@aws-sdk/util-endpoints': 3.723.0 - '@smithy/core': 3.0.0 - '@smithy/protocol-http': 5.0.0 - '@smithy/types': 4.0.0 + '@aws-sdk/util-endpoints': 3.726.0 + '@smithy/core': 3.1.0 + '@smithy/protocol-http': 5.0.1 + '@smithy/types': 4.1.0 tslib: 2.6.2 '@aws-sdk/middleware-user-agent@3.750.0': @@ -19267,10 +20010,10 @@ snapshots: '@aws-sdk/region-config-resolver@3.723.0': dependencies: '@aws-sdk/types': 3.723.0 - '@smithy/node-config-provider': 4.0.0 - '@smithy/types': 4.0.0 + '@smithy/node-config-provider': 4.0.1 + '@smithy/types': 4.1.0 '@smithy/util-config-provider': 4.0.0 - '@smithy/util-middleware': 4.0.0 + '@smithy/util-middleware': 4.0.1 tslib: 2.6.2 '@aws-sdk/region-config-resolver@3.734.0': @@ -19343,13 +20086,13 @@ snapshots: - '@aws-sdk/credential-provider-node' - aws-crt - '@aws-sdk/token-providers@3.723.0(@aws-sdk/client-sso-oidc@3.723.0(@aws-sdk/client-sts@3.723.0))': + '@aws-sdk/token-providers@3.723.0(@aws-sdk/client-sso-oidc@3.726.0(@aws-sdk/client-sts@3.726.1))': dependencies: - '@aws-sdk/client-sso-oidc': 3.723.0(@aws-sdk/client-sts@3.723.0) + '@aws-sdk/client-sso-oidc': 3.726.0(@aws-sdk/client-sts@3.726.1) '@aws-sdk/types': 3.723.0 - '@smithy/property-provider': 4.0.0 - '@smithy/shared-ini-file-loader': 4.0.0 - '@smithy/types': 4.0.0 + '@smithy/property-provider': 4.0.1 + '@smithy/shared-ini-file-loader': 4.0.1 + '@smithy/types': 4.1.0 tslib: 2.6.2 '@aws-sdk/token-providers@3.750.0': @@ -19373,9 +20116,14 @@ snapshots: '@smithy/types': 2.11.0 tslib: 2.6.2 + '@aws-sdk/types@3.609.0': + dependencies: + '@smithy/types': 3.3.0 + tslib: 2.6.2 + '@aws-sdk/types@3.723.0': dependencies: - '@smithy/types': 4.0.0 + '@smithy/types': 4.1.0 tslib: 2.6.2 '@aws-sdk/types@3.734.0': @@ -19399,11 +20147,11 @@ snapshots: '@smithy/util-endpoints': 1.1.5 tslib: 2.6.2 - '@aws-sdk/util-endpoints@3.723.0': + '@aws-sdk/util-endpoints@3.726.0': dependencies: '@aws-sdk/types': 3.723.0 - '@smithy/types': 4.0.0 - '@smithy/util-endpoints': 3.0.0 + '@smithy/types': 4.1.0 + '@smithy/util-endpoints': 3.0.1 tslib: 2.6.2 '@aws-sdk/util-endpoints@3.743.0': @@ -19434,7 +20182,7 @@ snapshots: '@aws-sdk/util-user-agent-browser@3.723.0': dependencies: '@aws-sdk/types': 3.723.0 - '@smithy/types': 4.0.0 + '@smithy/types': 4.1.0 bowser: 2.11.0 tslib: 2.6.2 @@ -19459,12 +20207,12 @@ snapshots: '@smithy/types': 
2.11.0 tslib: 2.6.2 - '@aws-sdk/util-user-agent-node@3.723.0': + '@aws-sdk/util-user-agent-node@3.726.0': dependencies: - '@aws-sdk/middleware-user-agent': 3.723.0 + '@aws-sdk/middleware-user-agent': 3.726.0 '@aws-sdk/types': 3.723.0 - '@smithy/node-config-provider': 4.0.0 - '@smithy/types': 4.0.0 + '@smithy/node-config-provider': 4.0.1 + '@smithy/types': 4.1.0 tslib: 2.6.2 '@aws-sdk/util-user-agent-node@3.750.0': @@ -19743,7 +20491,7 @@ snapshots: '@babel/core': 7.24.0 '@babel/helper-plugin-utils': 7.25.9 '@babel/helper-skip-transparent-expression-wrappers': 7.22.5 - '@babel/plugin-transform-optional-chaining': 7.24.5(@babel/core@7.24.0) + '@babel/plugin-transform-optional-chaining': 7.23.4(@babel/core@7.24.0) '@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@7.24.1(@babel/core@7.24.0)': dependencies: @@ -20332,7 +21080,7 @@ snapshots: '@babel/helper-compilation-targets': 7.23.6 '@babel/helper-plugin-utils': 7.25.9 '@babel/plugin-syntax-object-rest-spread': 7.8.3(@babel/core@7.24.0) - '@babel/plugin-transform-parameters': 7.24.5(@babel/core@7.24.0) + '@babel/plugin-transform-parameters': 7.23.3(@babel/core@7.24.0) '@babel/plugin-transform-object-rest-spread@7.24.5(@babel/core@7.24.0)': dependencies: @@ -20833,7 +21581,7 @@ snapshots: dependencies: regenerator-runtime: 0.14.1 - '@babel/runtime@7.26.9': + '@babel/runtime@7.26.10': dependencies: regenerator-runtime: 0.14.1 @@ -20865,7 +21613,7 @@ snapshots: '@babel/preset-react': 7.25.9(@babel/core@7.24.0) '@rollup/plugin-inject': 5.0.5(rollup@3.29.4) '@rollup/plugin-json': 6.1.0(rollup@3.29.4) - '@types/node-fetch': 2.6.11 + '@types/node-fetch': 2.6.12 async-mutex: 0.5.0 bottleneck: 2.19.5 crypto-js: 4.2.0 @@ -20915,41 +21663,56 @@ snapshots: '@cfworker/json-schema@4.1.0': {} - '@codemirror/autocomplete@6.14.0(@codemirror/language@6.10.1)(@codemirror/state@6.4.1)(@codemirror/view@6.25.1)(@lezer/common@1.2.1)': + '@codemirror/autocomplete@6.14.0(@codemirror/language@6.10.1)(@codemirror/state@6.4.1)(@codemirror/view@6.26.3)(@lezer/common@1.2.1)': dependencies: '@codemirror/language': 6.10.1 '@codemirror/state': 6.4.1 - '@codemirror/view': 6.25.1 + '@codemirror/view': 6.26.3 '@lezer/common': 1.2.1 - '@codemirror/autocomplete@6.14.0(@codemirror/language@6.10.1)(@codemirror/state@6.4.1)(@codemirror/view@6.26.3)(@lezer/common@1.2.1)': + '@codemirror/commands@6.3.3': dependencies: '@codemirror/language': 6.10.1 '@codemirror/state': 6.4.1 '@codemirror/view': 6.26.3 '@lezer/common': 1.2.1 - '@codemirror/commands@6.3.3': + '@codemirror/commands@6.5.0': dependencies: '@codemirror/language': 6.10.1 '@codemirror/state': 6.4.1 - '@codemirror/view': 6.25.1 + '@codemirror/view': 6.26.3 '@lezer/common': 1.2.1 - '@codemirror/commands@6.5.0': + '@codemirror/lang-css@6.2.1(@codemirror/view@6.26.3)': dependencies: + '@codemirror/autocomplete': 6.14.0(@codemirror/language@6.10.1)(@codemirror/state@6.4.1)(@codemirror/view@6.26.3)(@lezer/common@1.2.1) + '@codemirror/language': 6.10.1 + '@codemirror/state': 6.4.1 + '@lezer/common': 1.2.1 + '@lezer/css': 1.1.8 + transitivePeerDependencies: + - '@codemirror/view' + + '@codemirror/lang-html@6.4.9': + dependencies: + '@codemirror/autocomplete': 6.14.0(@codemirror/language@6.10.1)(@codemirror/state@6.4.1)(@codemirror/view@6.26.3)(@lezer/common@1.2.1) + '@codemirror/lang-css': 6.2.1(@codemirror/view@6.26.3) + '@codemirror/lang-javascript': 6.2.2 '@codemirror/language': 6.10.1 '@codemirror/state': 6.4.1 '@codemirror/view': 6.26.3 '@lezer/common': 1.2.1 + '@lezer/css': 1.1.8 + '@lezer/html': 1.3.10 
'@codemirror/lang-javascript@6.2.2': dependencies: - '@codemirror/autocomplete': 6.14.0(@codemirror/language@6.10.1)(@codemirror/state@6.4.1)(@codemirror/view@6.25.1)(@lezer/common@1.2.1) + '@codemirror/autocomplete': 6.14.0(@codemirror/language@6.10.1)(@codemirror/state@6.4.1)(@codemirror/view@6.26.3)(@lezer/common@1.2.1) '@codemirror/language': 6.10.1 '@codemirror/lint': 6.5.0 '@codemirror/state': 6.4.1 - '@codemirror/view': 6.25.1 + '@codemirror/view': 6.26.3 '@lezer/common': 1.2.1 '@lezer/javascript': 1.4.13 @@ -20958,10 +21721,20 @@ snapshots: '@codemirror/language': 6.10.1 '@lezer/json': 1.0.2 + '@codemirror/lang-markdown@6.2.5': + dependencies: + '@codemirror/autocomplete': 6.14.0(@codemirror/language@6.10.1)(@codemirror/state@6.4.1)(@codemirror/view@6.26.3)(@lezer/common@1.2.1) + '@codemirror/lang-html': 6.4.9 + '@codemirror/language': 6.10.1 + '@codemirror/state': 6.4.1 + '@codemirror/view': 6.26.3 + '@lezer/common': 1.2.1 + '@lezer/markdown': 1.3.0 + '@codemirror/language@6.10.1': dependencies: '@codemirror/state': 6.4.1 - '@codemirror/view': 6.25.1 + '@codemirror/view': 6.26.3 '@lezer/common': 1.2.1 '@lezer/highlight': 1.2.1 '@lezer/lr': 1.4.0 @@ -20970,7 +21743,7 @@ snapshots: '@codemirror/lint@6.5.0': dependencies: '@codemirror/state': 6.4.1 - '@codemirror/view': 6.25.1 + '@codemirror/view': 6.26.3 crelt: 1.0.6 '@codemirror/search@6.5.6': @@ -20988,12 +21761,6 @@ snapshots: '@codemirror/view': 6.26.3 '@lezer/highlight': 1.2.1 - '@codemirror/view@6.25.1': - dependencies: - '@codemirror/state': 6.4.1 - style-mod: 4.1.2 - w3c-keyname: 2.2.8 - '@codemirror/view@6.26.3': dependencies: '@codemirror/state': 6.4.1 @@ -21175,7 +21942,7 @@ snapshots: '@elastic/transport@8.4.1': dependencies: - debug: 4.3.7(supports-color@8.1.1) + debug: 4.4.0(supports-color@8.1.1) hpagent: 1.2.0 ms: 2.1.3 secure-json-parse: 2.7.0 @@ -21192,7 +21959,7 @@ snapshots: '@emotion/babel-plugin@11.11.0': dependencies: '@babel/helper-module-imports': 7.25.9 - '@babel/runtime': 7.26.9 + '@babel/runtime': 7.26.10 '@emotion/hash': 0.9.1 '@emotion/memoize': 0.8.1 '@emotion/serialize': 1.1.3 @@ -21492,7 +22259,7 @@ snapshots: '@gar/promisify@1.1.3': {} - 
'@getzep/zep-cloud@1.0.7(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(langchain@0.3.5(@langchain/anthropic@0.3.14(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13))(@langchain/aws@0.1.4(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/cohere@0.0.7(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/google-genai@0.2.3(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(zod@3.22.4))(@langchain/google-vertexai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(zod@3.22.4))(@langchain/groq@0.1.2(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))(@langchain/mistralai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/ollama@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(axios@1.7.9)(cheerio@1.0.0-rc.12)(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(typeorm@0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2)))(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))': + 
'@getzep/zep-cloud@1.0.7(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(langchain@0.3.6(@langchain/anthropic@0.3.14(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13))(@langchain/aws@0.1.4(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/cohere@0.0.7(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/google-genai@0.2.3(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(zod@3.22.4))(@langchain/google-vertexai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(zod@3.22.4))(@langchain/groq@0.1.2(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))(@langchain/mistralai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/ollama@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(axios@1.7.9)(cheerio@1.0.0-rc.12)(encoding@0.1.13)(handlebars@4.7.8)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(typeorm@0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)))(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))': dependencies: form-data: 4.0.0 node-fetch: 2.7.0(encoding@0.1.13) @@ -21501,7 +22268,7 @@ snapshots: zod: 3.23.8 optionalDependencies: '@langchain/core': 0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)) - langchain: 
0.3.5(@langchain/anthropic@0.3.14(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13))(@langchain/aws@0.1.4(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/cohere@0.0.7(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/google-genai@0.2.3(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(zod@3.22.4))(@langchain/google-vertexai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(zod@3.22.4))(@langchain/groq@0.1.2(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))(@langchain/mistralai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/ollama@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(axios@1.7.9)(cheerio@1.0.0-rc.12)(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(typeorm@0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2)))(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)) + langchain: 
0.3.6(@langchain/anthropic@0.3.14(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13))(@langchain/aws@0.1.4(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/cohere@0.0.7(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/google-genai@0.2.3(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(zod@3.22.4))(@langchain/google-vertexai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(zod@3.22.4))(@langchain/groq@0.1.2(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))(@langchain/mistralai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/ollama@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(axios@1.7.9)(cheerio@1.0.0-rc.12)(encoding@0.1.13)(handlebars@4.7.8)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(typeorm@0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)))(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)) transitivePeerDependencies: - encoding @@ -21632,6 +22399,16 @@ snapshots: - encoding - supports-color + '@google/genai@0.7.0(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.4)': + dependencies: + google-auth-library: 9.15.1(encoding@0.1.13) + ws: 8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4) + transitivePeerDependencies: + - bufferutil + - encoding + - supports-color + - utf-8-validate + '@google/generative-ai@0.24.0': {} '@graphql-typed-document-node/core@3.2.0(graphql@16.8.1)': @@ -21686,7 +22463,7 @@ snapshots: '@humanwhocodes/object-schema@2.0.2': {} - '@ibm-cloud/watsonx-ai@1.1.2': + '@ibm-cloud/watsonx-ai@1.2.0': dependencies: '@types/node': 18.19.23 extend: 3.0.2 @@ -21799,7 +22576,7 @@ snapshots: '@jest/console@27.5.1': dependencies: '@jest/types': 27.5.1 - '@types/node': 22.13.9 + '@types/node': 22.5.4 chalk: 4.1.2 jest-message-util: 27.5.1 jest-util: 27.5.1 @@ -21808,27 +22585,36 @@ snapshots: '@jest/console@28.1.3': dependencies: '@jest/types': 28.1.3 - '@types/node': 22.13.9 + '@types/node': 22.5.4 chalk: 4.1.2 jest-message-util: 28.1.3 jest-util: 28.1.3 slash: 3.0.0 - '@jest/core@27.5.1(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2))(utf-8-validate@6.0.4)': + '@jest/console@29.7.0': + dependencies: + '@jest/types': 29.6.3 + '@types/node': 22.5.4 + chalk: 4.1.2 + jest-message-util: 29.7.0 + jest-util: 29.7.0 + slash: 3.0.0 + + '@jest/core@27.5.1(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2))(utf-8-validate@6.0.4)': dependencies: '@jest/console': 27.5.1 '@jest/reporters': 27.5.1 '@jest/test-result': 27.5.1 
'@jest/transform': 27.5.1 '@jest/types': 27.5.1 - '@types/node': 22.13.9 + '@types/node': 22.5.4 ansi-escapes: 4.3.2 chalk: 4.1.2 emittery: 0.8.1 exit: 0.1.2 graceful-fs: 4.2.11 jest-changed-files: 27.5.1 - jest-config: 27.5.1(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2))(utf-8-validate@6.0.4) + jest-config: 27.5.1(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2))(utf-8-validate@6.0.4) jest-haste-map: 27.5.1 jest-message-util: 27.5.1 jest-regex-util: 27.5.1 @@ -21851,32 +22637,99 @@ snapshots: - ts-node - utf-8-validate + '@jest/core@29.7.0(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2))': + dependencies: + '@jest/console': 29.7.0 + '@jest/reporters': 29.7.0 + '@jest/test-result': 29.7.0 + '@jest/transform': 29.7.0 + '@jest/types': 29.6.3 + '@types/node': 22.5.4 + ansi-escapes: 4.3.2 + chalk: 4.1.2 + ci-info: 3.9.0 + exit: 0.1.2 + graceful-fs: 4.2.11 + jest-changed-files: 29.7.0 + jest-config: 29.7.0(@types/node@22.5.4)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)) + jest-haste-map: 29.7.0 + jest-message-util: 29.7.0 + jest-regex-util: 29.6.3 + jest-resolve: 29.7.0 + jest-resolve-dependencies: 29.7.0 + jest-runner: 29.7.0 + jest-runtime: 29.7.0 + jest-snapshot: 29.7.0 + jest-util: 29.7.0 + jest-validate: 29.7.0 + jest-watcher: 29.7.0 + micromatch: 4.0.8 + pretty-format: 29.7.0 + slash: 3.0.0 + strip-ansi: 6.0.1 + transitivePeerDependencies: + - babel-plugin-macros + - supports-color + - ts-node + '@jest/environment@27.5.1': dependencies: '@jest/fake-timers': 27.5.1 '@jest/types': 27.5.1 - '@types/node': 22.13.9 + '@types/node': 22.5.4 jest-mock: 27.5.1 + '@jest/environment@29.7.0': + dependencies: + '@jest/fake-timers': 29.7.0 + '@jest/types': 29.6.3 + '@types/node': 22.5.4 + jest-mock: 29.7.0 + '@jest/expect-utils@29.7.0': dependencies: jest-get-type: 29.6.3 + '@jest/expect@29.7.0': + dependencies: + expect: 29.7.0 + jest-snapshot: 29.7.0 + transitivePeerDependencies: + - supports-color + '@jest/fake-timers@27.5.1': dependencies: '@jest/types': 27.5.1 '@sinonjs/fake-timers': 8.1.0 - '@types/node': 22.13.9 + '@types/node': 22.5.4 jest-message-util: 27.5.1 jest-mock: 27.5.1 jest-util: 27.5.1 + '@jest/fake-timers@29.7.0': + dependencies: + '@jest/types': 29.6.3 + '@sinonjs/fake-timers': 10.3.0 + '@types/node': 22.5.4 + jest-message-util: 29.7.0 + jest-mock: 29.7.0 + jest-util: 29.7.0 + '@jest/globals@27.5.1': dependencies: '@jest/environment': 27.5.1 '@jest/types': 27.5.1 expect: 27.5.1 + '@jest/globals@29.7.0': + dependencies: + '@jest/environment': 29.7.0 + '@jest/expect': 29.7.0 + '@jest/types': 29.6.3 + jest-mock: 29.7.0 + transitivePeerDependencies: + - supports-color + '@jest/reporters@27.5.1': dependencies: '@bcoe/v8-coverage': 0.2.3 @@ -21884,7 +22737,7 @@ snapshots: '@jest/test-result': 27.5.1 '@jest/transform': 27.5.1 '@jest/types': 27.5.1 - '@types/node': 22.13.9 + '@types/node': 22.5.4 chalk: 4.1.2 collect-v8-coverage: 1.0.2 exit: 0.1.2 @@ -21907,6 +22760,35 @@ snapshots: transitivePeerDependencies: - supports-color + '@jest/reporters@29.7.0': + dependencies: + '@bcoe/v8-coverage': 0.2.3 + '@jest/console': 29.7.0 + '@jest/test-result': 29.7.0 + '@jest/transform': 29.7.0 + '@jest/types': 29.6.3 + '@jridgewell/trace-mapping': 0.3.25 + '@types/node': 22.5.4 + chalk: 4.1.2 + collect-v8-coverage: 1.0.2 + exit: 0.1.2 + glob: 7.2.3 + 
graceful-fs: 4.2.11 + istanbul-lib-coverage: 3.2.2 + istanbul-lib-instrument: 6.0.3 + istanbul-lib-report: 3.0.1 + istanbul-lib-source-maps: 4.0.1 + istanbul-reports: 3.1.7 + jest-message-util: 29.7.0 + jest-util: 29.7.0 + jest-worker: 29.7.0 + slash: 3.0.0 + string-length: 4.0.2 + strip-ansi: 6.0.1 + v8-to-istanbul: 9.3.0 + transitivePeerDependencies: + - supports-color + '@jest/schemas@28.1.3': dependencies: '@sinclair/typebox': 0.24.51 @@ -21921,6 +22803,12 @@ snapshots: graceful-fs: 4.2.11 source-map: 0.6.1 + '@jest/source-map@29.6.3': + dependencies: + '@jridgewell/trace-mapping': 0.3.25 + callsites: 3.1.0 + graceful-fs: 4.2.11 + '@jest/test-result@27.5.1': dependencies: '@jest/console': 27.5.1 @@ -21935,6 +22823,13 @@ snapshots: '@types/istanbul-lib-coverage': 2.0.6 collect-v8-coverage: 1.0.2 + '@jest/test-result@29.7.0': + dependencies: + '@jest/console': 29.7.0 + '@jest/types': 29.6.3 + '@types/istanbul-lib-coverage': 2.0.6 + collect-v8-coverage: 1.0.2 + '@jest/test-sequencer@27.5.1': dependencies: '@jest/test-result': 27.5.1 @@ -21944,6 +22839,13 @@ snapshots: transitivePeerDependencies: - supports-color + '@jest/test-sequencer@29.7.0': + dependencies: + '@jest/test-result': 29.7.0 + graceful-fs: 4.2.11 + jest-haste-map: 29.7.0 + slash: 3.0.0 + '@jest/transform@27.5.1': dependencies: '@babel/core': 7.24.0 @@ -21964,11 +22866,31 @@ snapshots: transitivePeerDependencies: - supports-color + '@jest/transform@29.7.0': + dependencies: + '@babel/core': 7.24.0 + '@jest/types': 29.6.3 + '@jridgewell/trace-mapping': 0.3.25 + babel-plugin-istanbul: 6.1.1 + chalk: 4.1.2 + convert-source-map: 2.0.0 + fast-json-stable-stringify: 2.1.0 + graceful-fs: 4.2.11 + jest-haste-map: 29.7.0 + jest-regex-util: 29.6.3 + jest-util: 29.7.0 + micromatch: 4.0.8 + pirates: 4.0.6 + slash: 3.0.0 + write-file-atomic: 4.0.2 + transitivePeerDependencies: + - supports-color + '@jest/types@27.5.1': dependencies: '@types/istanbul-lib-coverage': 2.0.6 '@types/istanbul-reports': 3.0.4 - '@types/node': 22.13.9 + '@types/node': 22.5.4 '@types/yargs': 16.0.9 chalk: 4.1.2 @@ -21977,7 +22899,7 @@ snapshots: '@jest/schemas': 28.1.3 '@types/istanbul-lib-coverage': 2.0.6 '@types/istanbul-reports': 3.0.4 - '@types/node': 22.13.9 + '@types/node': 22.5.4 '@types/yargs': 17.0.32 chalk: 4.1.2 @@ -21986,7 +22908,7 @@ snapshots: '@jest/schemas': 29.6.3 '@types/istanbul-lib-coverage': 2.0.6 '@types/istanbul-reports': 3.0.4 - '@types/node': 22.13.9 + '@types/node': 22.5.4 '@types/yargs': 17.0.32 chalk: 4.1.2 @@ -22037,12 +22959,22 @@ snapshots: dependencies: tslib: 2.6.2 + '@keyv/redis@4.3.3': + dependencies: + cluster-key-slot: 1.1.2 + keyv: 5.3.2 + redis: 4.7.0 + + '@keyv/serialize@1.0.3': + dependencies: + buffer: 6.0.3 + '@ladle/react-context@1.0.1(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': dependencies: react: 18.2.0 react-dom: 18.2.0(react@18.2.0) - '@ladle/react@2.5.1(@types/node@22.13.9)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)(sass@1.71.1)(terser@5.29.1)(typescript@5.5.2)': + '@ladle/react@2.5.1(@types/node@22.5.4)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)(sass@1.71.1)(terser@5.29.1)(typescript@5.5.2)': dependencies: '@babel/code-frame': 7.26.2 '@babel/core': 7.24.0 @@ -22052,12 +22984,12 @@ snapshots: '@babel/preset-env': 7.24.5(@babel/core@7.24.0) '@babel/preset-react': 7.25.9(@babel/core@7.24.0) '@babel/preset-typescript': 7.18.6(@babel/core@7.24.0) - '@babel/runtime': 7.26.9 + '@babel/runtime': 7.26.10 '@babel/template': 7.25.9 '@babel/traverse': 7.25.9 '@babel/types': 7.26.0 '@ladle/react-context': 
1.0.1(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@vitejs/plugin-react': 3.1.0(vite@4.5.2(@types/node@22.13.9)(sass@1.71.1)(terser@5.29.1)) + '@vitejs/plugin-react': 3.1.0(vite@4.5.2(@types/node@22.5.4)(sass@1.71.1)(terser@5.29.1)) axe-core: 4.8.4 boxen: 7.1.1 chokidar: 3.6.0 @@ -22079,8 +23011,8 @@ snapshots: react-dom: 18.2.0(react@18.2.0) react-frame-component: 5.2.6(prop-types@15.8.1)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) react-inspector: 6.0.2(react@18.2.0) - vite: 4.5.2(@types/node@22.13.9)(sass@1.71.1)(terser@5.29.1) - vite-tsconfig-paths: 4.3.1(typescript@5.5.2)(vite@4.5.2(@types/node@22.13.9)(sass@1.71.1)(terser@5.29.1)) + vite: 4.5.2(@types/node@22.5.4)(sass@1.71.1)(terser@5.29.1) + vite-tsconfig-paths: 4.3.1(typescript@5.5.2)(vite@4.5.2(@types/node@22.5.4)(sass@1.71.1)(terser@5.29.1)) transitivePeerDependencies: - '@types/node' - less @@ -22135,19 +23067,19 @@ snapshots: - encoding - openai - '@langchain/community@0.3.37(@aws-crypto/sha256-js@5.2.0)(@aws-sdk/client-bedrock-agent-runtime@3.755.0)(@aws-sdk/client-bedrock-runtime@3.422.0)(@aws-sdk/client-dynamodb@3.529.1)(@aws-sdk/client-kendra@3.750.0)(@aws-sdk/client-s3@3.529.1)(@aws-sdk/credential-provider-node@3.529.1)(@browserbasehq/sdk@2.0.0(encoding@0.1.13))(@browserbasehq/stagehand@1.9.0(@playwright/test@1.49.1)(bufferutil@4.0.8)(deepmerge@4.3.1)(dotenv@16.4.5)(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(utf-8-validate@6.0.4)(zod@3.22.4))(@datastax/astra-db-ts@1.5.0)(@elastic/elasticsearch@8.12.2)(@getzep/zep-cloud@1.0.7(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(langchain@0.3.5(@langchain/anthropic@0.3.14(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13))(@langchain/aws@0.1.4(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/cohere@0.0.7(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/google-genai@0.2.3(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(zod@3.22.4))(@langchain/google-vertexai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(zod@3.22.4))(@langchain/groq@0.1.2(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))(@langchain/mistralai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/ollama@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(axios@1.7.9)(cheerio@1.0.0-rc.12)(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(typeorm@0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2)))(ws@8.18.0(bufferutil@4.0.8)(utf-8-
validate@6.0.4))))(@getzep/zep-js@0.9.0)(@gomomento/sdk-core@1.68.1)(@gomomento/sdk@1.68.1(encoding@0.1.13))(@google-ai/generativelanguage@2.6.0(encoding@0.1.13))(@google-cloud/storage@7.16.0(encoding@0.1.13))(@huggingface/inference@2.6.4)(@ibm-cloud/watsonx-ai@1.1.2)(@langchain/anthropic@0.3.14(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13))(@langchain/aws@0.1.4(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/cohere@0.0.7(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/google-genai@0.2.3(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(zod@3.22.4))(@langchain/google-vertexai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(zod@3.22.4))(@langchain/groq@0.1.2(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))(@langchain/mistralai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/ollama@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@mendable/firecrawl-js@0.0.28)(@notionhq/client@2.2.14(encoding@0.1.13))(@opensearch-project/opensearch@1.2.0)(@pinecone-database/pinecone@4.0.0)(@qdrant/js-client-rest@1.9.0(typescript@5.5.2))(@smithy/eventstream-codec@4.0.1)(@smithy/protocol-http@5.0.1)(@smithy/signature-v4@5.0.1)(@smithy/util-utf8@4.0.0)(@supabase/supabase-js@2.39.8(bufferutil@4.0.8)(utf-8-validate@6.0.4))(@upstash/redis@1.22.1(encoding@0.1.13))(@upstash/vector@1.1.5)(@zilliz/milvus2-sdk-node@2.3.5)(apify-client@2.9.3)(assemblyai@4.3.2(bufferutil@4.0.8)(utf-8-validate@6.0.4))(axios@1.7.9)(cheerio@1.0.0-rc.12)(chromadb@1.10.0(@google/generative-ai@0.24.0)(cohere-ai@7.10.0(encoding@0.1.13))(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(cohere-ai@7.10.0(encoding@0.1.13))(crypto-js@4.2.0)(d3-dsv@2.0.0)(encoding@0.1.13)(epub2@3.0.2(ts-toolbelt@9.6.0))(fast-xml-parser@4.4.1)(google-auth-library@9.6.3(encoding@0.1.13))(html-to-text@9.0.5)(ibm-cloud-sdk-core@5.1.0)(ignore@5.3.1)(ioredis@5.3.2)(jsdom@22.1.0(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(utf-8-validate@6.0.4))(jsonwebtoken@9.0.2)(lodash@4.17.21)(lunary@0.7.12(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(react@18.2.0))(mammoth@1.7.0)(mem0ai@2.1.12(@anthropic-ai/sdk@0.37.0(encoding@0.1.13))(@qdrant/js-client-rest@1.9.0(typescript@5.5.2))(@supabase/supabase-js@2.39.8(bufferutil@4.0.8)(utf-8-validate@6.0.4))(@types/jest@29.5.12)(@types/pg@8.11.2)(@types/sqlite3@3.1.11)(encoding@0.1.13)(groq-sdk@0.5.0(encoding@0.1.13))(neo4j-driver@5.27.0)(ollama@0.5.11)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(neo4j-driver@5.27.0)(notion-to-md@3.1.1(encoding@0.1.13))(openai@4.96.0(encoding@0.1.1
3)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(pdf-parse@1.1.1)(pg@8.11.3)(playwright@1.42.1)(portkey-ai@0.1.16)(puppeteer@20.9.0(bufferutil@4.0.8)(encoding@0.1.13)(typescript@5.5.2)(utf-8-validate@6.0.4))(pyodide@0.25.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(redis@4.6.13)(replicate@0.31.1)(srt-parser-2@1.2.3)(typeorm@0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2)))(weaviate-ts-client@1.6.0(encoding@0.1.13)(graphql@16.8.1))(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))': + '@langchain/community@0.3.40(@aws-crypto/sha256-js@5.2.0)(@aws-sdk/client-bedrock-agent-runtime@3.755.0)(@aws-sdk/client-bedrock-runtime@3.422.0)(@aws-sdk/client-dynamodb@3.529.1)(@aws-sdk/client-kendra@3.750.0)(@aws-sdk/client-s3@3.529.1)(@aws-sdk/credential-provider-node@3.529.1)(@browserbasehq/sdk@2.0.0(encoding@0.1.13))(@browserbasehq/stagehand@1.9.0(@playwright/test@1.49.1)(bufferutil@4.0.8)(deepmerge@4.3.1)(dotenv@16.4.5)(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(utf-8-validate@6.0.4)(zod@3.22.4))(@datastax/astra-db-ts@1.5.0)(@elastic/elasticsearch@8.12.2)(@getzep/zep-cloud@1.0.7(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(langchain@0.3.6(@langchain/anthropic@0.3.14(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13))(@langchain/aws@0.1.4(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/cohere@0.0.7(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/google-genai@0.2.3(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(zod@3.22.4))(@langchain/google-vertexai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(zod@3.22.4))(@langchain/groq@0.1.2(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))(@langchain/mistralai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/ollama@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(axios@1.7.9)(cheerio@1.0.0-rc.12)(encoding@0.1.13)(handlebars@4.7.8)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(typeorm@0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)))(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))))(@getzep/zep-js@0.9.0)(@gomomento/sdk-core@1.68.1)(@gomomento/sdk@1.68.1(encoding@0.1.13))(@google-ai/generativelanguage@2.6.0(encoding@0.1.13))(@google-cloud/storage@7.16.0(encoding@0.1.13))(@huggingface/inference@2.6.4)(@i
bm-cloud/watsonx-ai@1.2.0)(@langchain/anthropic@0.3.14(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13))(@langchain/aws@0.1.4(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/cohere@0.0.7(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/google-genai@0.2.3(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(zod@3.22.4))(@langchain/google-vertexai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(zod@3.22.4))(@langchain/groq@0.1.2(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))(@langchain/mistralai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/ollama@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@mendable/firecrawl-js@1.25.1)(@notionhq/client@2.2.14(encoding@0.1.13))(@opensearch-project/opensearch@1.2.0)(@pinecone-database/pinecone@4.0.0)(@qdrant/js-client-rest@1.9.0(typescript@5.5.2))(@smithy/eventstream-codec@4.0.1)(@smithy/protocol-http@5.0.1)(@smithy/signature-v4@5.0.1)(@smithy/util-utf8@4.0.0)(@supabase/supabase-js@2.39.8(bufferutil@4.0.8)(utf-8-validate@6.0.4))(@upstash/redis@1.22.1(encoding@0.1.13))(@upstash/vector@1.1.5)(@zilliz/milvus2-sdk-node@2.3.5)(apify-client@2.9.3)(assemblyai@4.3.2(bufferutil@4.0.8)(utf-8-validate@6.0.4))(axios@1.7.9)(cheerio@1.0.0-rc.12)(chromadb@1.10.3(@google/generative-ai@0.24.0)(cohere-ai@7.10.0(encoding@0.1.13))(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(cohere-ai@7.10.0(encoding@0.1.13))(crypto-js@4.2.0)(d3-dsv@2.0.0)(encoding@0.1.13)(epub2@3.0.2(ts-toolbelt@9.6.0))(fast-xml-parser@4.4.1)(google-auth-library@9.6.3(encoding@0.1.13))(handlebars@4.7.8)(html-to-text@9.0.5)(ibm-cloud-sdk-core@5.1.0)(ignore@5.3.1)(ioredis@5.3.2)(jsdom@22.1.0(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(utf-8-validate@6.0.4))(jsonwebtoken@9.0.2)(lodash@4.17.21)(lunary@0.7.12(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(react@18.2.0))(mammoth@1.7.0)(mem0ai@2.1.16(@anthropic-ai/sdk@0.37.0(encoding@0.1.13))(@google/genai@0.7.0(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.4))(@mistralai/mistralai@0.1.3(encoding@0.1.13))(@qdrant/js-client-rest@1.9.0(typescript@5.5.2))(@supabase/supabase-js@2.39.8(bufferutil@4.0.8)(utf-8-validate@6.0.4))(@types/jest@29.5.14)(@types/pg@8.11.2)(@types/sqlite3@3.1.11)(encoding@0.1.13)(groq-sdk@0.5.0(encoding@0.1.13))(neo4j-driver@5.27.0)(ollama@0.5.11)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(neo4j-driver@5.27.0)(notion-to-md@3.1.1(encoding@0.1.13))(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(pdf-parse@1.1.1)(pg@8.11.3)(playwr
ight@1.42.1)(portkey-ai@0.1.16)(puppeteer@20.9.0(bufferutil@4.0.8)(encoding@0.1.13)(typescript@5.5.2)(utf-8-validate@6.0.4))(pyodide@0.25.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(redis@4.6.13)(replicate@0.31.1)(srt-parser-2@1.2.3)(typeorm@0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)))(weaviate-ts-client@1.6.0(encoding@0.1.13)(graphql@16.8.1))(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))': dependencies: '@browserbasehq/stagehand': 1.9.0(@playwright/test@1.49.1)(bufferutil@4.0.8)(deepmerge@4.3.1)(dotenv@16.4.5)(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(utf-8-validate@6.0.4)(zod@3.22.4) - '@ibm-cloud/watsonx-ai': 1.1.2 + '@ibm-cloud/watsonx-ai': 1.2.0 '@langchain/core': 0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)) - '@langchain/openai': 0.4.4(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)) + '@langchain/openai': 0.5.6(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)) binary-extensions: 2.2.0 expr-eval: 2.0.2 flat: 5.0.2 ibm-cloud-sdk-core: 5.1.0 js-yaml: 4.1.0 - langchain: 0.3.5(@langchain/anthropic@0.3.14(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13))(@langchain/aws@0.1.4(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/cohere@0.0.7(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/google-genai@0.2.3(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(zod@3.22.4))(@langchain/google-vertexai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(zod@3.22.4))(@langchain/groq@0.1.2(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))(@langchain/mistralai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/ollama@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(axios@1.7.9)(cheerio@1.0.0-rc.12)(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(typeorm@0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2)))(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)) - langsmith: 0.2.15(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)) + langchain: 
0.3.6(@langchain/anthropic@0.3.14(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13))(@langchain/aws@0.1.4(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/cohere@0.0.7(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/google-genai@0.2.3(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(zod@3.22.4))(@langchain/google-vertexai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(zod@3.22.4))(@langchain/groq@0.1.2(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))(@langchain/mistralai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/ollama@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(axios@1.7.9)(cheerio@1.0.0-rc.12)(encoding@0.1.13)(handlebars@4.7.8)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(typeorm@0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)))(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)) + langsmith: 0.2.8(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)) openai: 4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4) uuid: 10.0.0 zod: 3.22.4 @@ -22163,14 +23095,14 @@ snapshots: '@browserbasehq/sdk': 2.0.0(encoding@0.1.13) '@datastax/astra-db-ts': 1.5.0 '@elastic/elasticsearch': 8.12.2 - '@getzep/zep-cloud': 
1.0.7(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(langchain@0.3.5(@langchain/anthropic@0.3.14(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13))(@langchain/aws@0.1.4(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/cohere@0.0.7(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/google-genai@0.2.3(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(zod@3.22.4))(@langchain/google-vertexai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(zod@3.22.4))(@langchain/groq@0.1.2(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))(@langchain/mistralai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/ollama@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(axios@1.7.9)(cheerio@1.0.0-rc.12)(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(typeorm@0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2)))(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))) + '@getzep/zep-cloud': 
1.0.7(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(langchain@0.3.6(@langchain/anthropic@0.3.14(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13))(@langchain/aws@0.1.4(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/cohere@0.0.7(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/google-genai@0.2.3(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(zod@3.22.4))(@langchain/google-vertexai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(zod@3.22.4))(@langchain/groq@0.1.2(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))(@langchain/mistralai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/ollama@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(axios@1.7.9)(cheerio@1.0.0-rc.12)(encoding@0.1.13)(handlebars@4.7.8)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(typeorm@0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)))(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))) '@getzep/zep-js': 0.9.0 '@gomomento/sdk': 1.68.1(encoding@0.1.13) '@gomomento/sdk-core': 1.68.1 '@google-ai/generativelanguage': 2.6.0(encoding@0.1.13) '@google-cloud/storage': 7.16.0(encoding@0.1.13) '@huggingface/inference': 2.6.4 - '@mendable/firecrawl-js': 0.0.28 + '@mendable/firecrawl-js': 1.25.1 '@notionhq/client': 2.2.14(encoding@0.1.13) '@opensearch-project/opensearch': 1.2.0 '@pinecone-database/pinecone': 4.0.0 @@ -22186,7 +23118,7 @@ snapshots: apify-client: 2.9.3 assemblyai: 4.3.2(bufferutil@4.0.8)(utf-8-validate@6.0.4) cheerio: 1.0.0-rc.12 - chromadb: 1.10.0(@google/generative-ai@0.24.0)(cohere-ai@7.10.0(encoding@0.1.13))(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)) + chromadb: 1.10.3(@google/generative-ai@0.24.0)(cohere-ai@7.10.0(encoding@0.1.13))(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)) cohere-ai: 7.10.0(encoding@0.1.13) crypto-js: 4.2.0 d3-dsv: 2.0.0 @@ -22201,7 +23133,7 @@ snapshots: lodash: 4.17.21 lunary: 0.7.12(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(react@18.2.0) mammoth: 1.7.0 - mem0ai: 
2.1.12(@anthropic-ai/sdk@0.37.0(encoding@0.1.13))(@qdrant/js-client-rest@1.9.0(typescript@5.5.2))(@supabase/supabase-js@2.39.8(bufferutil@4.0.8)(utf-8-validate@6.0.4))(@types/jest@29.5.12)(@types/pg@8.11.2)(@types/sqlite3@3.1.11)(encoding@0.1.13)(groq-sdk@0.5.0(encoding@0.1.13))(neo4j-driver@5.27.0)(ollama@0.5.11)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)) + mem0ai: 2.1.16(@anthropic-ai/sdk@0.37.0(encoding@0.1.13))(@google/genai@0.7.0(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.4))(@mistralai/mistralai@0.1.3(encoding@0.1.13))(@qdrant/js-client-rest@1.9.0(typescript@5.5.2))(@supabase/supabase-js@2.39.8(bufferutil@4.0.8)(utf-8-validate@6.0.4))(@types/jest@29.5.14)(@types/pg@8.11.2)(@types/sqlite3@3.1.11)(encoding@0.1.13)(groq-sdk@0.5.0(encoding@0.1.13))(neo4j-driver@5.27.0)(ollama@0.5.11)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)) mongodb: 6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1) mysql2: 3.11.4 neo4j-driver: 5.27.0 @@ -22215,7 +23147,7 @@ snapshots: redis: 4.6.13 replicate: 0.31.1 srt-parser-2: 1.2.3 - typeorm: 0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2)) + typeorm: 0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)) weaviate-ts-client: 1.6.0(encoding@0.1.13)(graphql@16.8.1) ws: 8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4) transitivePeerDependencies: @@ -22239,7 +23171,7 @@ snapshots: camelcase: 6.3.0 decamelize: 1.2.0 js-tiktoken: 1.0.12 - langsmith: 0.2.15(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)) + langsmith: 0.2.8(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)) mustache: 4.2.0 p-queue: 6.6.2 p-retry: 4.6.2 @@ -22317,7 +23249,7 @@ snapshots: '@mistralai/mistralai': 1.3.6(zod@3.23.8) uuid: 10.0.0 zod: 3.23.8 - zod-to-json-schema: 3.24.1(zod@3.23.8) + zod-to-json-schema: 3.23.1(zod@3.23.8) '@langchain/mongodb@0.0.1(gcp-metadata@6.1.0(encoding@0.1.13))(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(socks@2.8.1)': dependencies: @@ -22352,17 +23284,6 @@ snapshots: - encoding - ws - '@langchain/openai@0.4.4(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))': - dependencies: - '@langchain/core': 0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)) - js-tiktoken: 1.0.12 - openai: 4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4) - zod: 3.22.4 - zod-to-json-schema: 3.24.1(zod@3.22.4) - transitivePeerDependencies: - - encoding - - ws - '@langchain/openai@0.5.6(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))': dependencies: '@langchain/core': 0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)) @@ -22419,10 +23340,22 @@ snapshots: '@lezer/common@1.2.1': {} + '@lezer/css@1.1.8': + dependencies: + 
'@lezer/common': 1.2.1 + '@lezer/highlight': 1.2.1 + '@lezer/lr': 1.4.0 + '@lezer/highlight@1.2.1': dependencies: '@lezer/common': 1.2.1 + '@lezer/html@1.3.10': + dependencies: + '@lezer/common': 1.2.1 + '@lezer/highlight': 1.2.1 + '@lezer/lr': 1.4.0 + '@lezer/javascript@1.4.13': dependencies: '@lezer/common': 1.2.1 @@ -22439,6 +23372,11 @@ snapshots: dependencies: '@lezer/common': 1.2.1 + '@lezer/markdown@1.3.0': + dependencies: + '@lezer/common': 1.2.1 + '@lezer/highlight': 1.2.1 + '@llamaindex/cloud@0.0.5(node-fetch@2.7.0(encoding@0.1.13))': dependencies: '@types/qs': 6.9.12 @@ -22471,12 +23409,12 @@ snapshots: - encoding - supports-color - '@mem0/community@0.0.1(@anthropic-ai/sdk@0.37.0(encoding@0.1.13))(@aws-crypto/sha256-js@5.2.0)(@aws-sdk/client-bedrock-agent-runtime@3.755.0)(@aws-sdk/client-bedrock-runtime@3.422.0)(@aws-sdk/client-dynamodb@3.529.1)(@aws-sdk/client-kendra@3.750.0)(@aws-sdk/client-s3@3.529.1)(@aws-sdk/credential-provider-node@3.529.1)(@browserbasehq/sdk@2.0.0(encoding@0.1.13))(@browserbasehq/stagehand@1.9.0(@playwright/test@1.49.1)(bufferutil@4.0.8)(deepmerge@4.3.1)(dotenv@16.4.5)(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(utf-8-validate@6.0.4)(zod@3.22.4))(@datastax/astra-db-ts@1.5.0)(@elastic/elasticsearch@8.12.2)(@getzep/zep-cloud@1.0.7(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(langchain@0.3.5(@langchain/anthropic@0.3.14(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13))(@langchain/aws@0.1.4(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/cohere@0.0.7(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/google-genai@0.2.3(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(zod@3.22.4))(@langchain/google-vertexai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(zod@3.22.4))(@langchain/groq@0.1.2(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))(@langchain/mistralai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/ollama@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(axios@1.7.9)(cheerio@1.0.0-rc.12)(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(typeorm@0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2)))(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))))(@getzep/zep-js@0.9.0)(@gomomento/sdk-core@1.68.1)(@gomomento/sdk@1.68.1(encoding@0.1.13))(@google-ai/generativelanguage@2.6.0(encoding@0.1.13))(@google-cloud/storage@7.16.0(encoding@0.1.13))(@huggingface/inference@2.6.4)(@
ibm-cloud/watsonx-ai@1.1.2)(@langchain/anthropic@0.3.14(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13))(@langchain/aws@0.1.4(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/cohere@0.0.7(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/google-genai@0.2.3(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(zod@3.22.4))(@langchain/google-vertexai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(zod@3.22.4))(@langchain/groq@0.1.2(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))(@langchain/mistralai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/ollama@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@mendable/firecrawl-js@0.0.28)(@notionhq/client@2.2.14(encoding@0.1.13))(@opensearch-project/opensearch@1.2.0)(@pinecone-database/pinecone@4.0.0)(@qdrant/js-client-rest@1.9.0(typescript@5.5.2))(@smithy/eventstream-codec@4.0.1)(@smithy/protocol-http@5.0.1)(@smithy/signature-v4@5.0.1)(@smithy/util-utf8@4.0.0)(@supabase/supabase-js@2.39.8(bufferutil@4.0.8)(utf-8-validate@6.0.4))(@types/jest@29.5.12)(@types/pg@8.11.2)(@types/sqlite3@3.1.11)(@upstash/redis@1.22.1(encoding@0.1.13))(@upstash/vector@1.1.5)(@zilliz/milvus2-sdk-node@2.3.5)(apify-client@2.9.3)(assemblyai@4.3.2(bufferutil@4.0.8)(utf-8-validate@6.0.4))(cheerio@1.0.0-rc.12)(chromadb@1.10.0(@google/generative-ai@0.24.0)(cohere-ai@7.10.0(encoding@0.1.13))(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(cohere-ai@7.10.0(encoding@0.1.13))(crypto-js@4.2.0)(d3-dsv@2.0.0)(encoding@0.1.13)(epub2@3.0.2(ts-toolbelt@9.6.0))(fast-xml-parser@4.4.1)(google-auth-library@9.6.3(encoding@0.1.13))(groq-sdk@0.5.0(encoding@0.1.13))(html-to-text@9.0.5)(ibm-cloud-sdk-core@5.1.0)(ignore@5.3.1)(ioredis@5.3.2)(jsdom@22.1.0(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(utf-8-validate@6.0.4))(jsonwebtoken@9.0.2)(lodash@4.17.21)(lunary@0.7.12(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(react@18.2.0))(mammoth@1.7.0)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(neo4j-driver@5.27.0)(notion-to-md@3.1.1(encoding@0.1.13))(ollama@0.5.11)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(pdf-parse@1.1.1)(pg@8.11.3)(playwright@1.42.1)(portkey-ai@0.1.16)(puppeteer@20.9.0(bufferutil@4.0.8)(encoding@0.1.13)(typescript@5.5.2)(utf-8-validate@6.0.4))(pyodide@0.25.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(redis@4.6.13)(replicate@0.31.1)(sqlite3@5.1.7)(srt-parser-2@1.2.3)(typeorm@0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2)))(weaviate-ts-client@1.6.0(encoding@0.1.13)(graphql@16.8.1))(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.
4))': + '@mem0/community@0.0.1(@anthropic-ai/sdk@0.37.0(encoding@0.1.13))(@aws-crypto/sha256-js@5.2.0)(@aws-sdk/client-bedrock-agent-runtime@3.755.0)(@aws-sdk/client-bedrock-runtime@3.422.0)(@aws-sdk/client-dynamodb@3.529.1)(@aws-sdk/client-kendra@3.750.0)(@aws-sdk/client-s3@3.529.1)(@aws-sdk/credential-provider-node@3.529.1)(@browserbasehq/sdk@2.0.0(encoding@0.1.13))(@browserbasehq/stagehand@1.9.0(@playwright/test@1.49.1)(bufferutil@4.0.8)(deepmerge@4.3.1)(dotenv@16.4.5)(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(utf-8-validate@6.0.4)(zod@3.22.4))(@datastax/astra-db-ts@1.5.0)(@elastic/elasticsearch@8.12.2)(@getzep/zep-cloud@1.0.7(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(langchain@0.3.6(@langchain/anthropic@0.3.14(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13))(@langchain/aws@0.1.4(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/cohere@0.0.7(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/google-genai@0.2.3(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(zod@3.22.4))(@langchain/google-vertexai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(zod@3.22.4))(@langchain/groq@0.1.2(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))(@langchain/mistralai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/ollama@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(axios@1.7.9)(cheerio@1.0.0-rc.12)(encoding@0.1.13)(handlebars@4.7.8)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(typeorm@0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)))(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))))(@getzep/zep-js@0.9.0)(@gomomento/sdk-core@1.68.1)(@gomomento/sdk@1.68.1(encoding@0.1.13))(@google-ai/generativelanguage@2.6.0(encoding@0.1.13))(@google-cloud/storage@7.16.0(encoding@0.1.13))(@google/genai@0.7.0(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.4))(@huggingface/inference@2.6.4)(@ibm-cloud/watsonx-ai@1.2.0)(@langchain/anthropic@0.3.14(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13))(@langchain/aws@0.1.4(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/cohere@0.0.7(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/google-genai@0.2.3(@langchain/core@0.3.37(openai@4.96.0(encoding@0.
1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(zod@3.22.4))(@langchain/google-vertexai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(zod@3.22.4))(@langchain/groq@0.1.2(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))(@langchain/mistralai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/ollama@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@mendable/firecrawl-js@1.25.1)(@mistralai/mistralai@0.1.3(encoding@0.1.13))(@notionhq/client@2.2.14(encoding@0.1.13))(@opensearch-project/opensearch@1.2.0)(@pinecone-database/pinecone@4.0.0)(@qdrant/js-client-rest@1.9.0(typescript@5.5.2))(@smithy/eventstream-codec@4.0.1)(@smithy/protocol-http@5.0.1)(@smithy/signature-v4@5.0.1)(@smithy/util-utf8@4.0.0)(@supabase/supabase-js@2.39.8(bufferutil@4.0.8)(utf-8-validate@6.0.4))(@types/jest@29.5.14)(@types/pg@8.11.2)(@types/sqlite3@3.1.11)(@upstash/redis@1.22.1(encoding@0.1.13))(@upstash/vector@1.1.5)(@zilliz/milvus2-sdk-node@2.3.5)(apify-client@2.9.3)(assemblyai@4.3.2(bufferutil@4.0.8)(utf-8-validate@6.0.4))(cheerio@1.0.0-rc.12)(chromadb@1.10.3(@google/generative-ai@0.24.0)(cohere-ai@7.10.0(encoding@0.1.13))(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(cohere-ai@7.10.0(encoding@0.1.13))(crypto-js@4.2.0)(d3-dsv@2.0.0)(encoding@0.1.13)(epub2@3.0.2(ts-toolbelt@9.6.0))(fast-xml-parser@4.4.1)(google-auth-library@9.6.3(encoding@0.1.13))(groq-sdk@0.5.0(encoding@0.1.13))(handlebars@4.7.8)(html-to-text@9.0.5)(ibm-cloud-sdk-core@5.1.0)(ignore@5.3.1)(ioredis@5.3.2)(jsdom@22.1.0(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(utf-8-validate@6.0.4))(jsonwebtoken@9.0.2)(lodash@4.17.21)(lunary@0.7.12(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(react@18.2.0))(mammoth@1.7.0)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(neo4j-driver@5.27.0)(notion-to-md@3.1.1(encoding@0.1.13))(ollama@0.5.11)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(pdf-parse@1.1.1)(pg@8.11.3)(playwright@1.42.1)(portkey-ai@0.1.16)(puppeteer@20.9.0(bufferutil@4.0.8)(encoding@0.1.13)(typescript@5.5.2)(utf-8-validate@6.0.4))(pyodide@0.25.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(redis@4.6.13)(replicate@0.31.1)(sqlite3@5.1.7)(srt-parser-2@1.2.3)(typeorm@0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)))(weaviate-ts-client@1.6.0(encoding@0.1.13)(graphql@16.8.1))(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))': dependencies: - '@langchain/community': 
0.3.37(@aws-crypto/sha256-js@5.2.0)(@aws-sdk/client-bedrock-agent-runtime@3.755.0)(@aws-sdk/client-bedrock-runtime@3.422.0)(@aws-sdk/client-dynamodb@3.529.1)(@aws-sdk/client-kendra@3.750.0)(@aws-sdk/client-s3@3.529.1)(@aws-sdk/credential-provider-node@3.529.1)(@browserbasehq/sdk@2.0.0(encoding@0.1.13))(@browserbasehq/stagehand@1.9.0(@playwright/test@1.49.1)(bufferutil@4.0.8)(deepmerge@4.3.1)(dotenv@16.4.5)(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(utf-8-validate@6.0.4)(zod@3.22.4))(@datastax/astra-db-ts@1.5.0)(@elastic/elasticsearch@8.12.2)(@getzep/zep-cloud@1.0.7(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(langchain@0.3.5(@langchain/anthropic@0.3.14(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13))(@langchain/aws@0.1.4(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/cohere@0.0.7(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/google-genai@0.2.3(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(zod@3.22.4))(@langchain/google-vertexai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(zod@3.22.4))(@langchain/groq@0.1.2(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))(@langchain/mistralai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/ollama@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(axios@1.7.9)(cheerio@1.0.0-rc.12)(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(typeorm@0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2)))(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))))(@getzep/zep-js@0.9.0)(@gomomento/sdk-core@1.68.1)(@gomomento/sdk@1.68.1(encoding@0.1.13))(@google-ai/generativelanguage@2.6.0(encoding@0.1.13))(@google-cloud/storage@7.16.0(encoding@0.1.13))(@huggingface/inference@2.6.4)(@ibm-cloud/watsonx-ai@1.1.2)(@langchain/anthropic@0.3.14(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13))(@langchain/aws@0.1.4(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/cohere@0.0.7(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/google-genai@0.2.3(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-val
idate@6.0.4))(zod@3.22.4)))(zod@3.22.4))(@langchain/google-vertexai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(zod@3.22.4))(@langchain/groq@0.1.2(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))(@langchain/mistralai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/ollama@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@mendable/firecrawl-js@0.0.28)(@notionhq/client@2.2.14(encoding@0.1.13))(@opensearch-project/opensearch@1.2.0)(@pinecone-database/pinecone@4.0.0)(@qdrant/js-client-rest@1.9.0(typescript@5.5.2))(@smithy/eventstream-codec@4.0.1)(@smithy/protocol-http@5.0.1)(@smithy/signature-v4@5.0.1)(@smithy/util-utf8@4.0.0)(@supabase/supabase-js@2.39.8(bufferutil@4.0.8)(utf-8-validate@6.0.4))(@upstash/redis@1.22.1(encoding@0.1.13))(@upstash/vector@1.1.5)(@zilliz/milvus2-sdk-node@2.3.5)(apify-client@2.9.3)(assemblyai@4.3.2(bufferutil@4.0.8)(utf-8-validate@6.0.4))(axios@1.7.9)(cheerio@1.0.0-rc.12)(chromadb@1.10.0(@google/generative-ai@0.24.0)(cohere-ai@7.10.0(encoding@0.1.13))(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(cohere-ai@7.10.0(encoding@0.1.13))(crypto-js@4.2.0)(d3-dsv@2.0.0)(encoding@0.1.13)(epub2@3.0.2(ts-toolbelt@9.6.0))(fast-xml-parser@4.4.1)(google-auth-library@9.6.3(encoding@0.1.13))(html-to-text@9.0.5)(ibm-cloud-sdk-core@5.1.0)(ignore@5.3.1)(ioredis@5.3.2)(jsdom@22.1.0(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(utf-8-validate@6.0.4))(jsonwebtoken@9.0.2)(lodash@4.17.21)(lunary@0.7.12(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(react@18.2.0))(mammoth@1.7.0)(mem0ai@2.1.12(@anthropic-ai/sdk@0.37.0(encoding@0.1.13))(@qdrant/js-client-rest@1.9.0(typescript@5.5.2))(@supabase/supabase-js@2.39.8(bufferutil@4.0.8)(utf-8-validate@6.0.4))(@types/jest@29.5.12)(@types/pg@8.11.2)(@types/sqlite3@3.1.11)(encoding@0.1.13)(groq-sdk@0.5.0(encoding@0.1.13))(neo4j-driver@5.27.0)(ollama@0.5.11)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(neo4j-driver@5.27.0)(notion-to-md@3.1.1(encoding@0.1.13))(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(pdf-parse@1.1.1)(pg@8.11.3)(playwright@1.42.1)(portkey-ai@0.1.16)(puppeteer@20.9.0(bufferutil@4.0.8)(encoding@0.1.13)(typescript@5.5.2)(utf-8-validate@6.0.4))(pyodide@0.25.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(redis@4.6.13)(replicate@0.31.1)(srt-parser-2@1.2.3)(typeorm@0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2)))(weaviate-ts-client@1.6.0(encoding@0.1.13)(graphql@16.8.1))(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)) + '@langchain/community': 
0.3.40(@aws-crypto/sha256-js@5.2.0)(@aws-sdk/client-bedrock-agent-runtime@3.755.0)(@aws-sdk/client-bedrock-runtime@3.422.0)(@aws-sdk/client-dynamodb@3.529.1)(@aws-sdk/client-kendra@3.750.0)(@aws-sdk/client-s3@3.529.1)(@aws-sdk/credential-provider-node@3.529.1)(@browserbasehq/sdk@2.0.0(encoding@0.1.13))(@browserbasehq/stagehand@1.9.0(@playwright/test@1.49.1)(bufferutil@4.0.8)(deepmerge@4.3.1)(dotenv@16.4.5)(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(utf-8-validate@6.0.4)(zod@3.22.4))(@datastax/astra-db-ts@1.5.0)(@elastic/elasticsearch@8.12.2)(@getzep/zep-cloud@1.0.7(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(langchain@0.3.6(@langchain/anthropic@0.3.14(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13))(@langchain/aws@0.1.4(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/cohere@0.0.7(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/google-genai@0.2.3(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(zod@3.22.4))(@langchain/google-vertexai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(zod@3.22.4))(@langchain/groq@0.1.2(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))(@langchain/mistralai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/ollama@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(axios@1.7.9)(cheerio@1.0.0-rc.12)(encoding@0.1.13)(handlebars@4.7.8)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(typeorm@0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)))(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))))(@getzep/zep-js@0.9.0)(@gomomento/sdk-core@1.68.1)(@gomomento/sdk@1.68.1(encoding@0.1.13))(@google-ai/generativelanguage@2.6.0(encoding@0.1.13))(@google-cloud/storage@7.16.0(encoding@0.1.13))(@huggingface/inference@2.6.4)(@ibm-cloud/watsonx-ai@1.2.0)(@langchain/anthropic@0.3.14(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13))(@langchain/aws@0.1.4(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/cohere@0.0.7(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/google-genai@0.2.3(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil
@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(zod@3.22.4))(@langchain/google-vertexai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(zod@3.22.4))(@langchain/groq@0.1.2(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))(@langchain/mistralai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/ollama@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@mendable/firecrawl-js@1.25.1)(@notionhq/client@2.2.14(encoding@0.1.13))(@opensearch-project/opensearch@1.2.0)(@pinecone-database/pinecone@4.0.0)(@qdrant/js-client-rest@1.9.0(typescript@5.5.2))(@smithy/eventstream-codec@4.0.1)(@smithy/protocol-http@5.0.1)(@smithy/signature-v4@5.0.1)(@smithy/util-utf8@4.0.0)(@supabase/supabase-js@2.39.8(bufferutil@4.0.8)(utf-8-validate@6.0.4))(@upstash/redis@1.22.1(encoding@0.1.13))(@upstash/vector@1.1.5)(@zilliz/milvus2-sdk-node@2.3.5)(apify-client@2.9.3)(assemblyai@4.3.2(bufferutil@4.0.8)(utf-8-validate@6.0.4))(axios@1.7.9)(cheerio@1.0.0-rc.12)(chromadb@1.10.3(@google/generative-ai@0.24.0)(cohere-ai@7.10.0(encoding@0.1.13))(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(cohere-ai@7.10.0(encoding@0.1.13))(crypto-js@4.2.0)(d3-dsv@2.0.0)(encoding@0.1.13)(epub2@3.0.2(ts-toolbelt@9.6.0))(fast-xml-parser@4.4.1)(google-auth-library@9.6.3(encoding@0.1.13))(handlebars@4.7.8)(html-to-text@9.0.5)(ibm-cloud-sdk-core@5.1.0)(ignore@5.3.1)(ioredis@5.3.2)(jsdom@22.1.0(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(utf-8-validate@6.0.4))(jsonwebtoken@9.0.2)(lodash@4.17.21)(lunary@0.7.12(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(react@18.2.0))(mammoth@1.7.0)(mem0ai@2.1.16(@anthropic-ai/sdk@0.37.0(encoding@0.1.13))(@google/genai@0.7.0(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.4))(@mistralai/mistralai@0.1.3(encoding@0.1.13))(@qdrant/js-client-rest@1.9.0(typescript@5.5.2))(@supabase/supabase-js@2.39.8(bufferutil@4.0.8)(utf-8-validate@6.0.4))(@types/jest@29.5.14)(@types/pg@8.11.2)(@types/sqlite3@3.1.11)(encoding@0.1.13)(groq-sdk@0.5.0(encoding@0.1.13))(neo4j-driver@5.27.0)(ollama@0.5.11)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(neo4j-driver@5.27.0)(notion-to-md@3.1.1(encoding@0.1.13))(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(pdf-parse@1.1.1)(pg@8.11.3)(playwright@1.42.1)(portkey-ai@0.1.16)(puppeteer@20.9.0(bufferutil@4.0.8)(encoding@0.1.13)(typescript@5.5.2)(utf-8-validate@6.0.4))(pyodide@0.25.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(redis@4.6.13)(replicate@0.31.1)(srt-parser-2@1.2.3)(typeorm@0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)))(weaviate-ts-client@1.6.0(encoding@0.1.13)(graphql@16.8.1))(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)) '@langchain/core': 0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)) axios: 
1.7.9(debug@4.3.4) - mem0ai: 2.1.12(@anthropic-ai/sdk@0.37.0(encoding@0.1.13))(@qdrant/js-client-rest@1.9.0(typescript@5.5.2))(@supabase/supabase-js@2.39.8(bufferutil@4.0.8)(utf-8-validate@6.0.4))(@types/jest@29.5.12)(@types/pg@8.11.2)(@types/sqlite3@3.1.11)(encoding@0.1.13)(groq-sdk@0.5.0(encoding@0.1.13))(neo4j-driver@5.27.0)(ollama@0.5.11)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)) + mem0ai: 2.1.16(@anthropic-ai/sdk@0.37.0(encoding@0.1.13))(@google/genai@0.7.0(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.4))(@mistralai/mistralai@0.1.3(encoding@0.1.13))(@qdrant/js-client-rest@1.9.0(typescript@5.5.2))(@supabase/supabase-js@2.39.8(bufferutil@4.0.8)(utf-8-validate@6.0.4))(@types/jest@29.5.14)(@types/pg@8.11.2)(@types/sqlite3@3.1.11)(encoding@0.1.13)(groq-sdk@0.5.0(encoding@0.1.13))(neo4j-driver@5.27.0)(ollama@0.5.11)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)) uuid: 9.0.1 zod: 3.22.4 transitivePeerDependencies: @@ -22508,6 +23446,7 @@ snapshots: - '@gomomento/sdk-core' - '@google-ai/generativelanguage' - '@google-cloud/storage' + - '@google/genai' - '@gradientai/nodejs-sdk' - '@huggingface/inference' - '@huggingface/transformers' @@ -22524,6 +23463,7 @@ snapshots: - '@layerup/layerup-security' - '@libsql/client' - '@mendable/firecrawl-js' + - '@mistralai/mistralai' - '@mlc-ai/web-llm' - '@mozilla/readability' - '@neondatabase/serverless' @@ -22557,6 +23497,7 @@ snapshots: - '@zilliz/milvus2-sdk-node' - apify-client - assemblyai + - azion - better-sqlite3 - cassandra-driver - cborg @@ -22626,13 +23567,12 @@ snapshots: - ws - youtubei.js - '@mendable/firecrawl-js@0.0.28': + '@mendable/firecrawl-js@1.25.1': dependencies: axios: 1.7.9(debug@4.3.4) - dotenv: 16.4.5 - uuid: 9.0.1 - zod: 3.23.8 - zod-to-json-schema: 3.23.1(zod@3.23.8) + typescript-event-target: 1.1.1 + zod: 3.24.2 + zod-to-json-schema: 3.24.1(zod@3.24.2) transitivePeerDependencies: - debug @@ -22668,7 +23608,7 @@ snapshots: raw-body: 3.0.0 zod: 3.24.2 - '@modelcontextprotocol/sdk@1.10.1': + '@modelcontextprotocol/sdk@1.10.2': dependencies: content-type: 1.0.5 cors: 2.8.5 @@ -22683,6 +23623,22 @@ snapshots: transitivePeerDependencies: - supports-color + '@modelcontextprotocol/sdk@1.12.0': + dependencies: + ajv: 6.12.6 + content-type: 1.0.5 + cors: 2.8.5 + cross-spawn: 7.0.6 + eventsource: 3.0.5 + express: 5.0.1 + express-rate-limit: 7.5.0(express@5.0.1) + pkce-challenge: 5.0.0 + raw-body: 3.0.0 + zod: 3.25.32 + zod-to-json-schema: 3.24.1(zod@3.25.32) + transitivePeerDependencies: + - supports-color + '@modelcontextprotocol/server-brave-search@0.6.2': dependencies: '@modelcontextprotocol/sdk': 1.0.1 @@ -22690,7 +23646,7 @@ snapshots: '@modelcontextprotocol/server-github@2025.1.23': dependencies: '@modelcontextprotocol/sdk': 1.0.1 - '@types/node': 22.13.9 + '@types/node': 22.5.4 '@types/node-fetch': 2.6.12 node-fetch: 3.3.2 zod: 3.22.4 @@ -22737,9 +23693,9 @@ snapshots: '@mui/base@5.0.0-beta.27(@types/react@18.2.65)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': dependencies: - '@babel/runtime': 7.26.9 + '@babel/runtime': 7.26.10 '@floating-ui/react-dom': 2.0.8(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@mui/types': 7.2.14(@types/react@18.2.65) + '@mui/types': 7.2.21(@types/react@18.2.65) '@mui/utils': 5.16.0(@types/react@18.2.65)(react@18.2.0) '@popperjs/core': 2.11.8 clsx: 2.1.1 @@ -22813,7 +23769,7 @@ snapshots: '@mui/private-theming@5.15.12(@types/react@18.2.65)(react@18.2.0)': dependencies: - '@babel/runtime': 7.26.9 + 
'@babel/runtime': 7.26.10 '@mui/utils': 5.16.0(@types/react@18.2.65)(react@18.2.0) prop-types: 15.8.1 react: 18.2.0 @@ -22822,7 +23778,7 @@ snapshots: '@mui/private-theming@6.4.6(@types/react@18.2.65)(react@18.2.0)': dependencies: - '@babel/runtime': 7.26.9 + '@babel/runtime': 7.26.10 '@mui/utils': 6.4.6(@types/react@18.2.65)(react@18.2.0) prop-types: 15.8.1 react: 18.2.0 @@ -22831,7 +23787,7 @@ snapshots: '@mui/styled-engine@5.15.11(@emotion/react@11.11.4(@types/react@18.2.65)(react@18.2.0))(@emotion/styled@11.11.0(@emotion/react@11.11.4(@types/react@18.2.65)(react@18.2.0))(@types/react@18.2.65)(react@18.2.0))(react@18.2.0)': dependencies: - '@babel/runtime': 7.26.9 + '@babel/runtime': 7.26.10 '@emotion/cache': 11.11.0 csstype: 3.1.3 prop-types: 15.8.1 @@ -22842,7 +23798,7 @@ snapshots: '@mui/styled-engine@6.4.6(@emotion/react@11.11.4(@types/react@18.2.65)(react@18.2.0))(@emotion/styled@11.11.0(@emotion/react@11.11.4(@types/react@18.2.65)(react@18.2.0))(@types/react@18.2.65)(react@18.2.0))(react@18.2.0)': dependencies: - '@babel/runtime': 7.26.9 + '@babel/runtime': 7.26.10 '@emotion/cache': 11.14.0 '@emotion/serialize': 1.3.3 '@emotion/sheet': 1.4.0 @@ -22855,10 +23811,10 @@ snapshots: '@mui/system@5.15.12(@emotion/react@11.11.4(@types/react@18.2.65)(react@18.2.0))(@emotion/styled@11.11.0(@emotion/react@11.11.4(@types/react@18.2.65)(react@18.2.0))(@types/react@18.2.65)(react@18.2.0))(@types/react@18.2.65)(react@18.2.0)': dependencies: - '@babel/runtime': 7.26.9 + '@babel/runtime': 7.26.10 '@mui/private-theming': 5.15.12(@types/react@18.2.65)(react@18.2.0) '@mui/styled-engine': 5.15.11(@emotion/react@11.11.4(@types/react@18.2.65)(react@18.2.0))(@emotion/styled@11.11.0(@emotion/react@11.11.4(@types/react@18.2.65)(react@18.2.0))(@types/react@18.2.65)(react@18.2.0))(react@18.2.0) - '@mui/types': 7.2.14(@types/react@18.2.65) + '@mui/types': 7.2.21(@types/react@18.2.65) '@mui/utils': 5.16.0(@types/react@18.2.65)(react@18.2.0) clsx: 2.1.1 csstype: 3.1.3 @@ -22871,7 +23827,7 @@ snapshots: '@mui/system@6.4.7(@emotion/react@11.11.4(@types/react@18.2.65)(react@18.2.0))(@emotion/styled@11.11.0(@emotion/react@11.11.4(@types/react@18.2.65)(react@18.2.0))(@types/react@18.2.65)(react@18.2.0))(@types/react@18.2.65)(react@18.2.0)': dependencies: - '@babel/runtime': 7.26.9 + '@babel/runtime': 7.26.10 '@mui/private-theming': 6.4.6(@types/react@18.2.65)(react@18.2.0) '@mui/styled-engine': 6.4.6(@emotion/react@11.11.4(@types/react@18.2.65)(react@18.2.0))(@emotion/styled@11.11.0(@emotion/react@11.11.4(@types/react@18.2.65)(react@18.2.0))(@types/react@18.2.65)(react@18.2.0))(react@18.2.0) '@mui/types': 7.2.21(@types/react@18.2.65) @@ -22899,8 +23855,8 @@ snapshots: '@mui/utils@5.15.12(@types/react@18.2.65)(react@18.2.0)': dependencies: - '@babel/runtime': 7.26.9 - '@types/prop-types': 15.7.11 + '@babel/runtime': 7.26.10 + '@types/prop-types': 15.7.14 prop-types: 15.8.1 react: 18.2.0 react-is: 18.2.0 @@ -22909,8 +23865,8 @@ snapshots: '@mui/utils@5.16.0(@types/react@18.2.65)(react@18.2.0)': dependencies: - '@babel/runtime': 7.26.9 - '@types/prop-types': 15.7.11 + '@babel/runtime': 7.26.10 + '@types/prop-types': 15.7.14 prop-types: 15.8.1 react: 18.2.0 react-is: 18.2.0 @@ -22919,7 +23875,7 @@ snapshots: '@mui/utils@6.4.6(@types/react@18.2.65)(react@18.2.0)': dependencies: - '@babel/runtime': 7.26.9 + '@babel/runtime': 7.26.10 '@mui/types': 7.2.21(@types/react@18.2.65) '@types/prop-types': 15.7.14 clsx: 2.1.1 @@ -22945,7 +23901,7 @@ snapshots: '@mui/x-internals@7.29.0(@types/react@18.2.65)(react@18.2.0)': 
dependencies: - '@babel/runtime': 7.26.9 + '@babel/runtime': 7.26.10 '@mui/utils': 6.4.6(@types/react@18.2.65)(react@18.2.0) react: 18.2.0 transitivePeerDependencies: @@ -22953,7 +23909,7 @@ snapshots: '@mui/x-tree-view@7.29.1(@emotion/react@11.11.4(@types/react@18.2.65)(react@18.2.0))(@emotion/styled@11.11.0(@emotion/react@11.11.4(@types/react@18.2.65)(react@18.2.0))(@types/react@18.2.65)(react@18.2.0))(@mui/material@5.15.0(@emotion/react@11.11.4(@types/react@18.2.65)(react@18.2.0))(@emotion/styled@11.11.0(@emotion/react@11.11.4(@types/react@18.2.65)(react@18.2.0))(@types/react@18.2.65)(react@18.2.0))(@types/react@18.2.65)(react-dom@18.2.0(react@18.2.0))(react@18.2.0))(@mui/system@6.4.7(@emotion/react@11.11.4(@types/react@18.2.65)(react@18.2.0))(@emotion/styled@11.11.0(@emotion/react@11.11.4(@types/react@18.2.65)(react@18.2.0))(@types/react@18.2.65)(react@18.2.0))(@types/react@18.2.65)(react@18.2.0))(@types/react@18.2.65)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': dependencies: - '@babel/runtime': 7.26.9 + '@babel/runtime': 7.26.10 '@mui/material': 5.15.0(@emotion/react@11.11.4(@types/react@18.2.65)(react@18.2.0))(@emotion/styled@11.11.0(@emotion/react@11.11.4(@types/react@18.2.65)(react@18.2.0))(@types/react@18.2.65)(react@18.2.0))(@types/react@18.2.65)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@mui/system': 6.4.7(@emotion/react@11.11.4(@types/react@18.2.65)(react@18.2.0))(@emotion/styled@11.11.0(@emotion/react@11.11.4(@types/react@18.2.65)(react@18.2.0))(@types/react@18.2.65)(react@18.2.0))(@types/react@18.2.65)(react@18.2.0) '@mui/utils': 6.4.6(@types/react@18.2.65)(react@18.2.0) @@ -22974,6 +23930,8 @@ snapshots: dependencies: eslint-scope: 5.1.1 + '@noble/hashes@1.8.0': {} + '@nodelib/fs.scandir@2.1.5': dependencies: '@nodelib/fs.stat': 2.0.5 @@ -23147,7 +24105,7 @@ snapshots: - bluebird - supports-color - '@oclif/core@2.15.0(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2)': + '@oclif/core@2.15.0(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)': dependencies: '@types/cli-progress': 3.11.5 ansi-escapes: 4.3.2 @@ -23172,7 +24130,7 @@ snapshots: strip-ansi: 6.0.1 supports-color: 8.1.1 supports-hyperlinks: 2.3.0 - ts-node: 10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2) + ts-node: 10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2) tslib: 2.6.2 widest-line: 3.1.0 wordwrap: 1.0.0 @@ -23203,18 +24161,18 @@ snapshots: wordwrap: 1.0.0 wrap-ansi: 7.0.0 - '@oclif/plugin-help@5.2.20(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2)': + '@oclif/plugin-help@5.2.20(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)': dependencies: - '@oclif/core': 2.15.0(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2) + '@oclif/core': 2.15.0(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2) transitivePeerDependencies: - '@swc/core' - '@swc/wasm' - '@types/node' - typescript - '@oclif/plugin-not-found@2.4.3(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2)': + '@oclif/plugin-not-found@2.4.3(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)': dependencies: - '@oclif/core': 2.15.0(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2) + '@oclif/core': 2.15.0(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2) chalk: 4.1.2 fast-levenshtein: 3.0.0 transitivePeerDependencies: @@ -23223,9 +24181,9 @@ snapshots: - '@types/node' - typescript - '@oclif/plugin-warn-if-update-available@2.1.1(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2)': + 
'@oclif/plugin-warn-if-update-available@2.1.1(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)': dependencies: - '@oclif/core': 2.15.0(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2) + '@oclif/core': 2.15.0(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2) chalk: 4.1.2 debug: 4.4.0(supports-color@8.1.1) http-call: 5.3.0 @@ -23329,15 +24287,19 @@ snapshots: dependencies: '@opentelemetry/api': 1.9.0 + '@opentelemetry/api-logs@0.54.2': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/api@1.9.0': {} - '@opentelemetry/auto-instrumentations-node@0.52.0(@opentelemetry/api@1.9.0)(encoding@0.1.13)': + '@opentelemetry/auto-instrumentations-node@0.52.1(@opentelemetry/api@1.9.0)(encoding@0.1.13)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/instrumentation': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.54.2(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation-amqplib': 0.43.0(@opentelemetry/api@1.9.0) - '@opentelemetry/instrumentation-aws-lambda': 0.46.0(@opentelemetry/api@1.9.0) - '@opentelemetry/instrumentation-aws-sdk': 0.45.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-aws-lambda': 0.47.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-aws-sdk': 0.46.0(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation-bunyan': 0.42.0(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation-cassandra-driver': 0.42.0(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation-connect': 0.40.0(@opentelemetry/api@1.9.0) @@ -23349,9 +24311,9 @@ snapshots: '@opentelemetry/instrumentation-fs': 0.16.0(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation-generic-pool': 0.40.0(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation-graphql': 0.44.0(@opentelemetry/api@1.9.0) - '@opentelemetry/instrumentation-grpc': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-grpc': 0.54.2(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation-hapi': 0.42.0(@opentelemetry/api@1.9.0) - '@opentelemetry/instrumentation-http': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-http': 0.54.2(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation-ioredis': 0.44.0(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation-kafkajs': 0.4.0(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation-knex': 0.41.0(@opentelemetry/api@1.9.0) @@ -23361,10 +24323,10 @@ snapshots: '@opentelemetry/instrumentation-mongodb': 0.48.0(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation-mongoose': 0.43.0(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation-mysql': 0.42.0(@opentelemetry/api@1.9.0) - '@opentelemetry/instrumentation-mysql2': 0.42.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-mysql2': 0.42.1(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation-nestjs-core': 0.41.0(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation-net': 0.40.0(@opentelemetry/api@1.9.0) - '@opentelemetry/instrumentation-pg': 0.47.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-pg': 0.47.1(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation-pino': 0.43.0(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation-redis': 0.43.0(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation-redis-4': 0.43.0(@opentelemetry/api@1.9.0) @@ -23372,7 +24334,7 @@ snapshots: '@opentelemetry/instrumentation-router': 0.41.0(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation-socket.io': 0.43.0(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation-tedious': 
0.15.0(@opentelemetry/api@1.9.0) - '@opentelemetry/instrumentation-undici': 0.7.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation-undici': 0.7.1(@opentelemetry/api@1.9.0) '@opentelemetry/instrumentation-winston': 0.41.0(@opentelemetry/api@1.9.0) '@opentelemetry/resource-detector-alibaba-cloud': 0.29.4(@opentelemetry/api@1.9.0) '@opentelemetry/resource-detector-aws': 1.7.0(@opentelemetry/api@1.9.0) @@ -23380,7 +24342,7 @@ snapshots: '@opentelemetry/resource-detector-container': 0.5.0(@opentelemetry/api@1.9.0) '@opentelemetry/resource-detector-gcp': 0.29.13(@opentelemetry/api@1.9.0)(encoding@0.1.13) '@opentelemetry/resources': 1.27.0(@opentelemetry/api@1.9.0) - '@opentelemetry/sdk-node': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-node': 0.54.2(@opentelemetry/api@1.9.0) transitivePeerDependencies: - encoding - supports-color @@ -23389,43 +24351,38 @@ snapshots: dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/core@1.26.0(@opentelemetry/api@1.9.0)': - dependencies: - '@opentelemetry/api': 1.9.0 - '@opentelemetry/semantic-conventions': 1.27.0 - '@opentelemetry/core@1.27.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/semantic-conventions': 1.27.0 - '@opentelemetry/exporter-logs-otlp-grpc@0.54.0(@opentelemetry/api@1.9.0)': + '@opentelemetry/exporter-logs-otlp-grpc@0.54.2(@opentelemetry/api@1.9.0)': dependencies: '@grpc/grpc-js': 1.10.10 '@opentelemetry/api': 1.9.0 '@opentelemetry/core': 1.27.0(@opentelemetry/api@1.9.0) - '@opentelemetry/otlp-grpc-exporter-base': 0.54.0(@opentelemetry/api@1.9.0) - '@opentelemetry/otlp-transformer': 0.54.0(@opentelemetry/api@1.9.0) - '@opentelemetry/sdk-logs': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-grpc-exporter-base': 0.54.2(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-transformer': 0.54.2(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-logs': 0.54.2(@opentelemetry/api@1.9.0) - '@opentelemetry/exporter-logs-otlp-http@0.54.0(@opentelemetry/api@1.9.0)': + '@opentelemetry/exporter-logs-otlp-http@0.54.2(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/api-logs': 0.54.0 + '@opentelemetry/api-logs': 0.54.2 '@opentelemetry/core': 1.27.0(@opentelemetry/api@1.9.0) - '@opentelemetry/otlp-exporter-base': 0.54.0(@opentelemetry/api@1.9.0) - '@opentelemetry/otlp-transformer': 0.54.0(@opentelemetry/api@1.9.0) - '@opentelemetry/sdk-logs': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-exporter-base': 0.54.2(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-transformer': 0.54.2(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-logs': 0.54.2(@opentelemetry/api@1.9.0) - '@opentelemetry/exporter-logs-otlp-proto@0.54.0(@opentelemetry/api@1.9.0)': + '@opentelemetry/exporter-logs-otlp-proto@0.54.2(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/api-logs': 0.54.0 + '@opentelemetry/api-logs': 0.54.2 '@opentelemetry/core': 1.27.0(@opentelemetry/api@1.9.0) - '@opentelemetry/otlp-exporter-base': 0.54.0(@opentelemetry/api@1.9.0) - '@opentelemetry/otlp-transformer': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-exporter-base': 0.54.2(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-transformer': 0.54.2(@opentelemetry/api@1.9.0) '@opentelemetry/resources': 1.27.0(@opentelemetry/api@1.9.0) - '@opentelemetry/sdk-logs': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-logs': 0.54.2(@opentelemetry/api@1.9.0) '@opentelemetry/sdk-trace-base': 1.27.0(@opentelemetry/api@1.9.0) 
'@opentelemetry/exporter-metrics-otlp-grpc@0.54.0(@opentelemetry/api@1.9.0)': @@ -23469,6 +24426,16 @@ snapshots: '@opentelemetry/resources': 1.27.0(@opentelemetry/api@1.9.0) '@opentelemetry/sdk-trace-base': 1.27.0(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-trace-otlp-grpc@0.54.2(@opentelemetry/api@1.9.0)': + dependencies: + '@grpc/grpc-js': 1.10.10 + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.27.0(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-grpc-exporter-base': 0.54.2(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-transformer': 0.54.2(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.27.0(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-base': 1.27.0(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-trace-otlp-http@0.54.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 @@ -23478,6 +24445,15 @@ snapshots: '@opentelemetry/resources': 1.27.0(@opentelemetry/api@1.9.0) '@opentelemetry/sdk-trace-base': 1.27.0(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-trace-otlp-http@0.54.2(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.27.0(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-exporter-base': 0.54.2(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-transformer': 0.54.2(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.27.0(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-base': 1.27.0(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-trace-otlp-proto@0.54.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 @@ -23487,6 +24463,15 @@ snapshots: '@opentelemetry/resources': 1.27.0(@opentelemetry/api@1.9.0) '@opentelemetry/sdk-trace-base': 1.27.0(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-trace-otlp-proto@0.54.2(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.27.0(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-exporter-base': 0.54.2(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-transformer': 0.54.2(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.27.0(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-base': 1.27.0(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-zipkin@1.27.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 @@ -23499,26 +24484,25 @@ snapshots: dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/core': 1.27.0(@opentelemetry/api@1.9.0) - '@opentelemetry/instrumentation': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.54.2(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.27.0 transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-aws-lambda@0.46.0(@opentelemetry/api@1.9.0)': + '@opentelemetry/instrumentation-aws-lambda@0.47.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/instrumentation': 0.54.0(@opentelemetry/api@1.9.0) - '@opentelemetry/propagator-aws-xray': 1.26.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.54.2(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.27.0 '@types/aws-lambda': 8.10.143 transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-aws-sdk@0.45.0(@opentelemetry/api@1.9.0)': + '@opentelemetry/instrumentation-aws-sdk@0.46.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/core': 1.27.0(@opentelemetry/api@1.9.0) - '@opentelemetry/instrumentation': 0.54.0(@opentelemetry/api@1.9.0) + 
'@opentelemetry/instrumentation': 0.54.2(@opentelemetry/api@1.9.0) '@opentelemetry/propagation-utils': 0.30.12(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.27.0 transitivePeerDependencies: @@ -23527,8 +24511,8 @@ snapshots: '@opentelemetry/instrumentation-bunyan@0.42.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/api-logs': 0.54.0 - '@opentelemetry/instrumentation': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/api-logs': 0.54.2 + '@opentelemetry/instrumentation': 0.54.2(@opentelemetry/api@1.9.0) '@types/bunyan': 1.8.9 transitivePeerDependencies: - supports-color @@ -23536,7 +24520,7 @@ snapshots: '@opentelemetry/instrumentation-cassandra-driver@0.42.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/instrumentation': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.54.2(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.27.0 transitivePeerDependencies: - supports-color @@ -23545,7 +24529,7 @@ snapshots: dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/core': 1.27.0(@opentelemetry/api@1.9.0) - '@opentelemetry/instrumentation': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.54.2(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.27.0 '@types/connect': 3.4.36 transitivePeerDependencies: @@ -23554,7 +24538,7 @@ snapshots: '@opentelemetry/instrumentation-cucumber@0.10.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/instrumentation': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.54.2(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.27.0 transitivePeerDependencies: - supports-color @@ -23562,14 +24546,14 @@ snapshots: '@opentelemetry/instrumentation-dataloader@0.13.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/instrumentation': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.54.2(@opentelemetry/api@1.9.0) transitivePeerDependencies: - supports-color '@opentelemetry/instrumentation-dns@0.40.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/instrumentation': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.54.2(@opentelemetry/api@1.9.0) transitivePeerDependencies: - supports-color @@ -23577,7 +24561,7 @@ snapshots: dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/core': 1.27.0(@opentelemetry/api@1.9.0) - '@opentelemetry/instrumentation': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.54.2(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.27.0 transitivePeerDependencies: - supports-color @@ -23586,7 +24570,7 @@ snapshots: dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/core': 1.27.0(@opentelemetry/api@1.9.0) - '@opentelemetry/instrumentation': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.54.2(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.27.0 transitivePeerDependencies: - supports-color @@ -23595,28 +24579,28 @@ snapshots: dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/core': 1.27.0(@opentelemetry/api@1.9.0) - '@opentelemetry/instrumentation': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.54.2(@opentelemetry/api@1.9.0) transitivePeerDependencies: - supports-color '@opentelemetry/instrumentation-generic-pool@0.40.0(@opentelemetry/api@1.9.0)': dependencies: 
'@opentelemetry/api': 1.9.0 - '@opentelemetry/instrumentation': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.54.2(@opentelemetry/api@1.9.0) transitivePeerDependencies: - supports-color '@opentelemetry/instrumentation-graphql@0.44.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/instrumentation': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.54.2(@opentelemetry/api@1.9.0) transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-grpc@0.54.0(@opentelemetry/api@1.9.0)': + '@opentelemetry/instrumentation-grpc@0.54.2(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/instrumentation': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.54.2(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.27.0 transitivePeerDependencies: - supports-color @@ -23625,16 +24609,16 @@ snapshots: dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/core': 1.27.0(@opentelemetry/api@1.9.0) - '@opentelemetry/instrumentation': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.54.2(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.27.0 transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-http@0.54.0(@opentelemetry/api@1.9.0)': + '@opentelemetry/instrumentation-http@0.54.2(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/core': 1.27.0(@opentelemetry/api@1.9.0) - '@opentelemetry/instrumentation': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.54.2(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.27.0 forwarded-parse: 2.1.2 semver: 7.7.1 @@ -23644,7 +24628,7 @@ snapshots: '@opentelemetry/instrumentation-ioredis@0.44.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/instrumentation': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.54.2(@opentelemetry/api@1.9.0) '@opentelemetry/redis-common': 0.36.2 '@opentelemetry/semantic-conventions': 1.27.0 transitivePeerDependencies: @@ -23653,7 +24637,7 @@ snapshots: '@opentelemetry/instrumentation-kafkajs@0.4.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/instrumentation': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.54.2(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.27.0 transitivePeerDependencies: - supports-color @@ -23661,7 +24645,7 @@ snapshots: '@opentelemetry/instrumentation-knex@0.41.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/instrumentation': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.54.2(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.27.0 transitivePeerDependencies: - supports-color @@ -23670,7 +24654,7 @@ snapshots: dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/core': 1.27.0(@opentelemetry/api@1.9.0) - '@opentelemetry/instrumentation': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.54.2(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.27.0 transitivePeerDependencies: - supports-color @@ -23678,14 +24662,14 @@ snapshots: '@opentelemetry/instrumentation-lru-memoizer@0.41.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/instrumentation': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 
0.54.2(@opentelemetry/api@1.9.0) transitivePeerDependencies: - supports-color '@opentelemetry/instrumentation-memcached@0.40.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/instrumentation': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.54.2(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.27.0 '@types/memcached': 2.2.10 transitivePeerDependencies: @@ -23694,7 +24678,7 @@ snapshots: '@opentelemetry/instrumentation-mongodb@0.48.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/instrumentation': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.54.2(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.27.0 transitivePeerDependencies: - supports-color @@ -23703,15 +24687,15 @@ snapshots: dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/core': 1.27.0(@opentelemetry/api@1.9.0) - '@opentelemetry/instrumentation': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.54.2(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.27.0 transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-mysql2@0.42.0(@opentelemetry/api@1.9.0)': + '@opentelemetry/instrumentation-mysql2@0.42.1(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/instrumentation': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.54.2(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.27.0 '@opentelemetry/sql-common': 0.40.1(@opentelemetry/api@1.9.0) transitivePeerDependencies: @@ -23720,7 +24704,7 @@ snapshots: '@opentelemetry/instrumentation-mysql@0.42.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/instrumentation': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.54.2(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.27.0 '@types/mysql': 2.15.26 transitivePeerDependencies: @@ -23729,7 +24713,7 @@ snapshots: '@opentelemetry/instrumentation-nestjs-core@0.41.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/instrumentation': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.54.2(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.27.0 transitivePeerDependencies: - supports-color @@ -23737,16 +24721,16 @@ snapshots: '@opentelemetry/instrumentation-net@0.40.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/instrumentation': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.54.2(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.27.0 transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-pg@0.47.0(@opentelemetry/api@1.9.0)': + '@opentelemetry/instrumentation-pg@0.47.1(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/core': 1.27.0(@opentelemetry/api@1.9.0) - '@opentelemetry/instrumentation': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.54.2(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.27.0 '@opentelemetry/sql-common': 0.40.1(@opentelemetry/api@1.9.0) '@types/pg': 8.6.1 @@ -23757,16 +24741,16 @@ snapshots: '@opentelemetry/instrumentation-pino@0.43.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/api-logs': 0.54.0 + '@opentelemetry/api-logs': 0.54.2 '@opentelemetry/core': 
1.27.0(@opentelemetry/api@1.9.0) - '@opentelemetry/instrumentation': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.54.2(@opentelemetry/api@1.9.0) transitivePeerDependencies: - supports-color '@opentelemetry/instrumentation-redis-4@0.43.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/instrumentation': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.54.2(@opentelemetry/api@1.9.0) '@opentelemetry/redis-common': 0.36.2 '@opentelemetry/semantic-conventions': 1.27.0 transitivePeerDependencies: @@ -23775,7 +24759,7 @@ snapshots: '@opentelemetry/instrumentation-redis@0.43.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/instrumentation': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.54.2(@opentelemetry/api@1.9.0) '@opentelemetry/redis-common': 0.36.2 '@opentelemetry/semantic-conventions': 1.27.0 transitivePeerDependencies: @@ -23785,7 +24769,7 @@ snapshots: dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/core': 1.27.0(@opentelemetry/api@1.9.0) - '@opentelemetry/instrumentation': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.54.2(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.27.0 transitivePeerDependencies: - supports-color @@ -23793,7 +24777,7 @@ snapshots: '@opentelemetry/instrumentation-router@0.41.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/instrumentation': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.54.2(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.27.0 transitivePeerDependencies: - supports-color @@ -23801,7 +24785,7 @@ snapshots: '@opentelemetry/instrumentation-socket.io@0.43.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/instrumentation': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.54.2(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.27.0 transitivePeerDependencies: - supports-color @@ -23809,25 +24793,25 @@ snapshots: '@opentelemetry/instrumentation-tedious@0.15.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/instrumentation': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.54.2(@opentelemetry/api@1.9.0) '@opentelemetry/semantic-conventions': 1.27.0 '@types/tedious': 4.0.14 transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation-undici@0.7.0(@opentelemetry/api@1.9.0)': + '@opentelemetry/instrumentation-undici@0.7.1(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/core': 1.27.0(@opentelemetry/api@1.9.0) - '@opentelemetry/instrumentation': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.54.2(@opentelemetry/api@1.9.0) transitivePeerDependencies: - supports-color '@opentelemetry/instrumentation-winston@0.41.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/api-logs': 0.54.0 - '@opentelemetry/instrumentation': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/api-logs': 0.54.2 + '@opentelemetry/instrumentation': 0.54.2(@opentelemetry/api@1.9.0) transitivePeerDependencies: - supports-color @@ -23842,10 +24826,10 @@ snapshots: transitivePeerDependencies: - supports-color - '@opentelemetry/instrumentation@0.54.0(@opentelemetry/api@1.9.0)': + '@opentelemetry/instrumentation@0.54.2(@opentelemetry/api@1.9.0)': dependencies: 
'@opentelemetry/api': 1.9.0 - '@opentelemetry/api-logs': 0.54.0 + '@opentelemetry/api-logs': 0.54.2 '@types/shimmer': 1.2.0 import-in-the-middle: 1.11.2 require-in-the-middle: 7.4.0 @@ -23860,6 +24844,12 @@ snapshots: '@opentelemetry/core': 1.27.0(@opentelemetry/api@1.9.0) '@opentelemetry/otlp-transformer': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-exporter-base@0.54.2(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.27.0(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-transformer': 0.54.2(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-grpc-exporter-base@0.54.0(@opentelemetry/api@1.9.0)': dependencies: '@grpc/grpc-js': 1.10.10 @@ -23868,6 +24858,14 @@ snapshots: '@opentelemetry/otlp-exporter-base': 0.54.0(@opentelemetry/api@1.9.0) '@opentelemetry/otlp-transformer': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-grpc-exporter-base@0.54.2(@opentelemetry/api@1.9.0)': + dependencies: + '@grpc/grpc-js': 1.10.10 + '@opentelemetry/api': 1.9.0 + '@opentelemetry/core': 1.27.0(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-exporter-base': 0.54.2(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-transformer': 0.54.2(@opentelemetry/api@1.9.0) + '@opentelemetry/otlp-transformer@0.54.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 @@ -23879,14 +24877,20 @@ snapshots: '@opentelemetry/sdk-trace-base': 1.27.0(@opentelemetry/api@1.9.0) protobufjs: 7.4.0 - '@opentelemetry/propagation-utils@0.30.12(@opentelemetry/api@1.9.0)': + '@opentelemetry/otlp-transformer@0.54.2(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 + '@opentelemetry/api-logs': 0.54.2 + '@opentelemetry/core': 1.27.0(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.27.0(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-logs': 0.54.2(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-metrics': 1.27.0(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-trace-base': 1.27.0(@opentelemetry/api@1.9.0) + protobufjs: 7.4.0 - '@opentelemetry/propagator-aws-xray@1.26.0(@opentelemetry/api@1.9.0)': + '@opentelemetry/propagation-utils@0.30.12(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/core': 1.26.0(@opentelemetry/api@1.9.0) '@opentelemetry/propagator-b3@1.27.0(@opentelemetry/api@1.9.0)': dependencies: @@ -23952,27 +24956,34 @@ snapshots: '@opentelemetry/core': 1.27.0(@opentelemetry/api@1.9.0) '@opentelemetry/resources': 1.27.0(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-logs@0.54.2(@opentelemetry/api@1.9.0)': + dependencies: + '@opentelemetry/api': 1.9.0 + '@opentelemetry/api-logs': 0.54.2 + '@opentelemetry/core': 1.27.0(@opentelemetry/api@1.9.0) + '@opentelemetry/resources': 1.27.0(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-metrics@1.27.0(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 '@opentelemetry/core': 1.27.0(@opentelemetry/api@1.9.0) '@opentelemetry/resources': 1.27.0(@opentelemetry/api@1.9.0) - '@opentelemetry/sdk-node@0.54.0(@opentelemetry/api@1.9.0)': + '@opentelemetry/sdk-node@0.54.2(@opentelemetry/api@1.9.0)': dependencies: '@opentelemetry/api': 1.9.0 - '@opentelemetry/api-logs': 0.54.0 + '@opentelemetry/api-logs': 0.54.2 '@opentelemetry/core': 1.27.0(@opentelemetry/api@1.9.0) - '@opentelemetry/exporter-logs-otlp-grpc': 0.54.0(@opentelemetry/api@1.9.0) - '@opentelemetry/exporter-logs-otlp-http': 0.54.0(@opentelemetry/api@1.9.0) - '@opentelemetry/exporter-logs-otlp-proto': 0.54.0(@opentelemetry/api@1.9.0) - '@opentelemetry/exporter-trace-otlp-grpc': 
0.54.0(@opentelemetry/api@1.9.0) - '@opentelemetry/exporter-trace-otlp-http': 0.54.0(@opentelemetry/api@1.9.0) - '@opentelemetry/exporter-trace-otlp-proto': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-logs-otlp-grpc': 0.54.2(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-logs-otlp-http': 0.54.2(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-logs-otlp-proto': 0.54.2(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-trace-otlp-grpc': 0.54.2(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-trace-otlp-http': 0.54.2(@opentelemetry/api@1.9.0) + '@opentelemetry/exporter-trace-otlp-proto': 0.54.2(@opentelemetry/api@1.9.0) '@opentelemetry/exporter-zipkin': 1.27.0(@opentelemetry/api@1.9.0) - '@opentelemetry/instrumentation': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/instrumentation': 0.54.2(@opentelemetry/api@1.9.0) '@opentelemetry/resources': 1.27.0(@opentelemetry/api@1.9.0) - '@opentelemetry/sdk-logs': 0.54.0(@opentelemetry/api@1.9.0) + '@opentelemetry/sdk-logs': 0.54.2(@opentelemetry/api@1.9.0) '@opentelemetry/sdk-metrics': 1.27.0(@opentelemetry/api@1.9.0) '@opentelemetry/sdk-trace-base': 1.27.0(@opentelemetry/api@1.9.0) '@opentelemetry/sdk-trace-node': 1.27.0(@opentelemetry/api@1.9.0) @@ -24004,6 +25015,10 @@ snapshots: '@opentelemetry/api': 1.9.0 '@opentelemetry/core': 1.27.0(@opentelemetry/api@1.9.0) + '@paralleldrive/cuid2@2.2.2': + dependencies: + '@noble/hashes': 1.8.0 + '@petamoriken/float16@3.8.7': {} '@pinecone-database/pinecone@2.2.2': @@ -24024,7 +25039,7 @@ snapshots: dependencies: playwright: 1.49.1 - '@pmmmwh/react-refresh-webpack-plugin@0.5.11(react-refresh@0.11.0)(type-fest@4.12.0)(webpack-dev-server@4.15.1(bufferutil@4.0.8)(utf-8-validate@6.0.4)(webpack@5.90.3(@swc/core@1.4.6)))(webpack@5.90.3(@swc/core@1.4.6))': + '@pmmmwh/react-refresh-webpack-plugin@0.5.11(react-refresh@0.11.0)(type-fest@4.40.1)(webpack-dev-server@4.15.1(bufferutil@4.0.8)(utf-8-validate@6.0.4)(webpack@5.90.3(@swc/core@1.4.6)))(webpack@5.90.3(@swc/core@1.4.6))': dependencies: ansi-html-community: 0.0.8 common-path-prefix: 3.0.0 @@ -24038,7 +25053,7 @@ snapshots: source-map: 0.7.4 webpack: 5.90.3(@swc/core@1.4.6) optionalDependencies: - type-fest: 4.12.0 + type-fest: 4.40.1 webpack-dev-server: 4.15.1(bufferutil@4.0.8)(utf-8-validate@6.0.4)(webpack@5.90.3(@swc/core@1.4.6)) '@popperjs/core@2.11.8': {} @@ -24089,29 +25104,29 @@ snapshots: '@qdrant/openapi-typescript-fetch@1.2.6': {} - '@reactflow/background@11.3.9(@types/react@18.2.65)(immer@9.0.21)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': + '@reactflow/background@11.3.9(@types/react@18.2.65)(immer@10.1.1)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': dependencies: - '@reactflow/core': 11.10.4(@types/react@18.2.65)(immer@9.0.21)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@reactflow/core': 11.10.4(@types/react@18.2.65)(immer@10.1.1)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) classcat: 5.0.4 react: 18.2.0 react-dom: 18.2.0(react@18.2.0) - zustand: 4.5.2(@types/react@18.2.65)(immer@9.0.21)(react@18.2.0) + zustand: 4.5.2(@types/react@18.2.65)(immer@10.1.1)(react@18.2.0) transitivePeerDependencies: - '@types/react' - immer - '@reactflow/controls@11.2.9(@types/react@18.2.65)(immer@9.0.21)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': + '@reactflow/controls@11.2.9(@types/react@18.2.65)(immer@10.1.1)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': dependencies: - '@reactflow/core': 11.10.4(@types/react@18.2.65)(immer@9.0.21)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@reactflow/core': 
11.10.4(@types/react@18.2.65)(immer@10.1.1)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) classcat: 5.0.4 react: 18.2.0 react-dom: 18.2.0(react@18.2.0) - zustand: 4.5.2(@types/react@18.2.65)(immer@9.0.21)(react@18.2.0) + zustand: 4.5.2(@types/react@18.2.65)(immer@10.1.1)(react@18.2.0) transitivePeerDependencies: - '@types/react' - immer - '@reactflow/core@11.10.4(@types/react@18.2.65)(immer@9.0.21)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': + '@reactflow/core@11.10.4(@types/react@18.2.65)(immer@10.1.1)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': dependencies: '@types/d3': 7.4.3 '@types/d3-drag': 3.0.7 @@ -24123,14 +25138,14 @@ snapshots: d3-zoom: 3.0.0 react: 18.2.0 react-dom: 18.2.0(react@18.2.0) - zustand: 4.5.2(@types/react@18.2.65)(immer@9.0.21)(react@18.2.0) + zustand: 4.5.2(@types/react@18.2.65)(immer@10.1.1)(react@18.2.0) transitivePeerDependencies: - '@types/react' - immer - '@reactflow/minimap@11.7.9(@types/react@18.2.65)(immer@9.0.21)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': + '@reactflow/minimap@11.7.9(@types/react@18.2.65)(immer@10.1.1)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': dependencies: - '@reactflow/core': 11.10.4(@types/react@18.2.65)(immer@9.0.21)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@reactflow/core': 11.10.4(@types/react@18.2.65)(immer@10.1.1)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) '@types/d3-selection': 3.0.10 '@types/d3-zoom': 3.0.8 classcat: 5.0.4 @@ -24138,31 +25153,31 @@ snapshots: d3-zoom: 3.0.0 react: 18.2.0 react-dom: 18.2.0(react@18.2.0) - zustand: 4.5.2(@types/react@18.2.65)(immer@9.0.21)(react@18.2.0) + zustand: 4.5.2(@types/react@18.2.65)(immer@10.1.1)(react@18.2.0) transitivePeerDependencies: - '@types/react' - immer - '@reactflow/node-resizer@2.2.9(@types/react@18.2.65)(immer@9.0.21)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': + '@reactflow/node-resizer@2.2.9(@types/react@18.2.65)(immer@10.1.1)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': dependencies: - '@reactflow/core': 11.10.4(@types/react@18.2.65)(immer@9.0.21)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@reactflow/core': 11.10.4(@types/react@18.2.65)(immer@10.1.1)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) classcat: 5.0.4 d3-drag: 3.0.0 d3-selection: 3.0.0 react: 18.2.0 react-dom: 18.2.0(react@18.2.0) - zustand: 4.5.2(@types/react@18.2.65)(immer@9.0.21)(react@18.2.0) + zustand: 4.5.2(@types/react@18.2.65)(immer@10.1.1)(react@18.2.0) transitivePeerDependencies: - '@types/react' - immer - '@reactflow/node-toolbar@1.3.9(@types/react@18.2.65)(immer@9.0.21)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': + '@reactflow/node-toolbar@1.3.9(@types/react@18.2.65)(immer@10.1.1)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': dependencies: - '@reactflow/core': 11.10.4(@types/react@18.2.65)(immer@9.0.21)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@reactflow/core': 11.10.4(@types/react@18.2.65)(immer@10.1.1)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) classcat: 5.0.4 react: 18.2.0 react-dom: 18.2.0(react@18.2.0) - zustand: 4.5.2(@types/react@18.2.65)(immer@9.0.21)(react@18.2.0) + zustand: 4.5.2(@types/react@18.2.65)(immer@10.1.1)(react@18.2.0) transitivePeerDependencies: - '@types/react' - immer @@ -24171,28 +25186,64 @@ snapshots: dependencies: '@redis/client': 1.5.14 + '@redis/bloom@1.2.0(@redis/client@1.6.0)': + dependencies: + '@redis/client': 1.6.0 + '@redis/client@1.5.14': dependencies: cluster-key-slot: 1.1.2 generic-pool: 3.9.0 yallist: 4.0.0 + '@redis/client@1.6.0': + dependencies: + cluster-key-slot: 1.1.2 + generic-pool: 3.9.0 + yallist: 
4.0.0 + '@redis/graph@1.1.1(@redis/client@1.5.14)': dependencies: '@redis/client': 1.5.14 + '@redis/graph@1.1.1(@redis/client@1.6.0)': + dependencies: + '@redis/client': 1.6.0 + '@redis/json@1.0.6(@redis/client@1.5.14)': dependencies: '@redis/client': 1.5.14 + '@redis/json@1.0.7(@redis/client@1.6.0)': + dependencies: + '@redis/client': 1.6.0 + '@redis/search@1.1.6(@redis/client@1.5.14)': dependencies: '@redis/client': 1.5.14 + '@redis/search@1.2.0(@redis/client@1.6.0)': + dependencies: + '@redis/client': 1.6.0 + '@redis/time-series@1.0.5(@redis/client@1.5.14)': dependencies: '@redis/client': 1.5.14 + '@redis/time-series@1.1.0(@redis/client@1.6.0)': + dependencies: + '@redis/client': 1.6.0 + + '@reduxjs/toolkit@2.2.7(react-redux@8.1.3(@types/react-dom@18.2.21)(@types/react@18.2.65)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)(redux@4.2.1))(react@18.2.0)': + dependencies: + immer: 10.1.1 + redux: 5.0.1 + redux-thunk: 3.1.0(redux@5.0.1) + reselect: 5.1.1 + optionalDependencies: + react: 18.2.0 + react-redux: 8.1.3(@types/react-dom@18.2.21)(@types/react@18.2.65)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)(redux@4.2.1) + '@remirror/core-constants@3.0.0': {} '@rollup/plugin-babel@5.3.1(@babel/core@7.24.0)(@types/babel__core@7.20.5)(rollup@2.79.1)': @@ -24340,6 +25391,14 @@ snapshots: dependencies: type-detect: 4.0.8 + '@sinonjs/commons@3.0.1': + dependencies: + type-detect: 4.0.8 + + '@sinonjs/fake-timers@10.3.0': + dependencies: + '@sinonjs/commons': 3.0.1 + '@sinonjs/fake-timers@8.1.0': dependencies: '@sinonjs/commons': 1.8.6 @@ -24349,11 +25408,6 @@ snapshots: '@smithy/types': 2.11.0 tslib: 2.6.2 - '@smithy/abort-controller@4.0.0': - dependencies: - '@smithy/types': 4.0.0 - tslib: 2.6.2 - '@smithy/abort-controller@4.0.1': dependencies: '@smithy/types': 4.1.0 @@ -24376,14 +25430,6 @@ snapshots: '@smithy/util-middleware': 2.1.4 tslib: 2.6.2 - '@smithy/config-resolver@4.0.0': - dependencies: - '@smithy/node-config-provider': 4.0.0 - '@smithy/types': 4.0.0 - '@smithy/util-config-provider': 4.0.0 - '@smithy/util-middleware': 4.0.0 - tslib: 2.6.2 - '@smithy/config-resolver@4.0.1': dependencies: '@smithy/node-config-provider': 4.0.1 @@ -24403,14 +25449,14 @@ snapshots: '@smithy/util-middleware': 2.1.4 tslib: 2.6.2 - '@smithy/core@3.0.0': + '@smithy/core@3.1.0': dependencies: - '@smithy/middleware-serde': 4.0.0 - '@smithy/protocol-http': 5.0.0 - '@smithy/types': 4.0.0 + '@smithy/middleware-serde': 4.0.1 + '@smithy/protocol-http': 5.0.1 + '@smithy/types': 4.1.0 '@smithy/util-body-length-browser': 4.0.0 - '@smithy/util-middleware': 4.0.0 - '@smithy/util-stream': 4.0.0 + '@smithy/util-middleware': 4.0.1 + '@smithy/util-stream': 4.0.1 '@smithy/util-utf8': 4.0.0 tslib: 2.6.2 @@ -24433,14 +25479,6 @@ snapshots: '@smithy/url-parser': 2.1.4 tslib: 2.6.2 - '@smithy/credential-provider-imds@4.0.0': - dependencies: - '@smithy/node-config-provider': 4.0.0 - '@smithy/property-provider': 4.0.0 - '@smithy/types': 4.0.0 - '@smithy/url-parser': 4.0.0 - tslib: 2.6.2 - '@smithy/credential-provider-imds@4.0.1': dependencies: '@smithy/node-config-provider': 4.0.1 @@ -24517,14 +25555,6 @@ snapshots: '@smithy/util-base64': 2.2.0 tslib: 2.6.2 - '@smithy/fetch-http-handler@5.0.0': - dependencies: - '@smithy/protocol-http': 5.0.0 - '@smithy/querystring-builder': 4.0.0 - '@smithy/types': 4.0.0 - '@smithy/util-base64': 4.0.0 - tslib: 2.6.2 - '@smithy/fetch-http-handler@5.0.1': dependencies: '@smithy/protocol-http': 5.0.1 @@ -24547,13 +25577,6 @@ snapshots: '@smithy/util-utf8': 2.2.0 tslib: 2.6.2 - '@smithy/hash-node@4.0.0': 
- dependencies: - '@smithy/types': 4.0.0 - '@smithy/util-buffer-from': 4.0.0 - '@smithy/util-utf8': 4.0.0 - tslib: 2.6.2 - '@smithy/hash-node@4.0.1': dependencies: '@smithy/types': 4.1.0 @@ -24572,11 +25595,6 @@ snapshots: '@smithy/types': 2.11.0 tslib: 2.6.2 - '@smithy/invalid-dependency@4.0.0': - dependencies: - '@smithy/types': 4.0.0 - tslib: 2.6.2 - '@smithy/invalid-dependency@4.0.1': dependencies: '@smithy/types': 4.1.0 @@ -24602,12 +25620,6 @@ snapshots: '@smithy/types': 2.11.0 tslib: 2.6.2 - '@smithy/middleware-content-length@4.0.0': - dependencies: - '@smithy/protocol-http': 5.0.0 - '@smithy/types': 4.0.0 - tslib: 2.6.2 - '@smithy/middleware-content-length@4.0.1': dependencies: '@smithy/protocol-http': 5.0.1 @@ -24624,15 +25636,15 @@ snapshots: '@smithy/util-middleware': 2.1.4 tslib: 2.6.2 - '@smithy/middleware-endpoint@4.0.0': + '@smithy/middleware-endpoint@4.0.1': dependencies: - '@smithy/core': 3.0.0 - '@smithy/middleware-serde': 4.0.0 - '@smithy/node-config-provider': 4.0.0 - '@smithy/shared-ini-file-loader': 4.0.0 - '@smithy/types': 4.0.0 - '@smithy/url-parser': 4.0.0 - '@smithy/util-middleware': 4.0.0 + '@smithy/core': 3.1.0 + '@smithy/middleware-serde': 4.0.1 + '@smithy/node-config-provider': 4.0.1 + '@smithy/shared-ini-file-loader': 4.0.1 + '@smithy/types': 4.1.0 + '@smithy/url-parser': 4.0.1 + '@smithy/util-middleware': 4.0.1 tslib: 2.6.2 '@smithy/middleware-endpoint@4.0.6': @@ -24658,15 +25670,15 @@ snapshots: tslib: 2.6.2 uuid: 8.3.2 - '@smithy/middleware-retry@4.0.0': + '@smithy/middleware-retry@4.0.2': dependencies: - '@smithy/node-config-provider': 4.0.0 - '@smithy/protocol-http': 5.0.0 - '@smithy/service-error-classification': 4.0.0 - '@smithy/smithy-client': 4.0.0 - '@smithy/types': 4.0.0 - '@smithy/util-middleware': 4.0.0 - '@smithy/util-retry': 4.0.0 + '@smithy/node-config-provider': 4.0.1 + '@smithy/protocol-http': 5.0.1 + '@smithy/service-error-classification': 4.0.1 + '@smithy/smithy-client': 4.1.1 + '@smithy/types': 4.1.0 + '@smithy/util-middleware': 4.0.1 + '@smithy/util-retry': 4.0.1 tslib: 2.6.2 uuid: 9.0.1 @@ -24687,9 +25699,9 @@ snapshots: '@smithy/types': 2.11.0 tslib: 2.6.2 - '@smithy/middleware-serde@4.0.0': + '@smithy/middleware-serde@4.0.1': dependencies: - '@smithy/types': 4.0.0 + '@smithy/types': 4.1.0 tslib: 2.6.2 '@smithy/middleware-serde@4.0.2': @@ -24702,11 +25714,6 @@ snapshots: '@smithy/types': 2.11.0 tslib: 2.6.2 - '@smithy/middleware-stack@4.0.0': - dependencies: - '@smithy/types': 4.0.0 - tslib: 2.6.2 - '@smithy/middleware-stack@4.0.1': dependencies: '@smithy/types': 4.1.0 @@ -24719,13 +25726,6 @@ snapshots: '@smithy/types': 2.11.0 tslib: 2.6.2 - '@smithy/node-config-provider@4.0.0': - dependencies: - '@smithy/property-provider': 4.0.0 - '@smithy/shared-ini-file-loader': 4.0.0 - '@smithy/types': 4.0.0 - tslib: 2.6.2 - '@smithy/node-config-provider@4.0.1': dependencies: '@smithy/property-provider': 4.0.1 @@ -24741,12 +25741,12 @@ snapshots: '@smithy/types': 2.11.0 tslib: 2.6.2 - '@smithy/node-http-handler@4.0.0': + '@smithy/node-http-handler@4.0.1': dependencies: - '@smithy/abort-controller': 4.0.0 - '@smithy/protocol-http': 5.0.0 - '@smithy/querystring-builder': 4.0.0 - '@smithy/types': 4.0.0 + '@smithy/abort-controller': 4.0.1 + '@smithy/protocol-http': 5.0.1 + '@smithy/querystring-builder': 4.0.1 + '@smithy/types': 4.1.0 tslib: 2.6.2 '@smithy/node-http-handler@4.0.3': @@ -24762,11 +25762,6 @@ snapshots: '@smithy/types': 2.11.0 tslib: 2.6.2 - '@smithy/property-provider@4.0.0': - dependencies: - '@smithy/types': 4.0.0 - tslib: 2.6.2 - 
'@smithy/property-provider@4.0.1': dependencies: '@smithy/types': 4.1.0 @@ -24777,11 +25772,6 @@ snapshots: '@smithy/types': 2.11.0 tslib: 2.6.2 - '@smithy/protocol-http@5.0.0': - dependencies: - '@smithy/types': 4.0.0 - tslib: 2.6.2 - '@smithy/protocol-http@5.0.1': dependencies: '@smithy/types': 4.1.0 @@ -24793,12 +25783,6 @@ snapshots: '@smithy/util-uri-escape': 2.1.1 tslib: 2.6.2 - '@smithy/querystring-builder@4.0.0': - dependencies: - '@smithy/types': 4.0.0 - '@smithy/util-uri-escape': 4.0.0 - tslib: 2.6.2 - '@smithy/querystring-builder@4.0.1': dependencies: '@smithy/types': 4.1.0 @@ -24810,11 +25794,6 @@ snapshots: '@smithy/types': 2.11.0 tslib: 2.6.2 - '@smithy/querystring-parser@4.0.0': - dependencies: - '@smithy/types': 4.0.0 - tslib: 2.6.2 - '@smithy/querystring-parser@4.0.1': dependencies: '@smithy/types': 4.1.0 @@ -24824,10 +25803,6 @@ snapshots: dependencies: '@smithy/types': 2.11.0 - '@smithy/service-error-classification@4.0.0': - dependencies: - '@smithy/types': 4.0.0 - '@smithy/service-error-classification@4.0.1': dependencies: '@smithy/types': 4.1.0 @@ -24837,11 +25812,6 @@ snapshots: '@smithy/types': 2.11.0 tslib: 2.6.2 - '@smithy/shared-ini-file-loader@4.0.0': - dependencies: - '@smithy/types': 4.0.0 - tslib: 2.6.2 - '@smithy/shared-ini-file-loader@4.0.1': dependencies: '@smithy/types': 4.1.0 @@ -24858,17 +25828,6 @@ snapshots: '@smithy/util-utf8': 2.2.0 tslib: 2.6.2 - '@smithy/signature-v4@5.0.0': - dependencies: - '@smithy/is-array-buffer': 4.0.0 - '@smithy/protocol-http': 5.0.0 - '@smithy/types': 4.0.0 - '@smithy/util-hex-encoding': 4.0.0 - '@smithy/util-middleware': 4.0.0 - '@smithy/util-uri-escape': 4.0.0 - '@smithy/util-utf8': 4.0.0 - tslib: 2.6.2 - '@smithy/signature-v4@5.0.1': dependencies: '@smithy/is-array-buffer': 4.0.0 @@ -24889,14 +25848,14 @@ snapshots: '@smithy/util-stream': 2.1.4 tslib: 2.6.2 - '@smithy/smithy-client@4.0.0': + '@smithy/smithy-client@4.1.1': dependencies: - '@smithy/core': 3.0.0 - '@smithy/middleware-endpoint': 4.0.0 - '@smithy/middleware-stack': 4.0.0 - '@smithy/protocol-http': 5.0.0 - '@smithy/types': 4.0.0 - '@smithy/util-stream': 4.0.0 + '@smithy/core': 3.1.0 + '@smithy/middleware-endpoint': 4.0.1 + '@smithy/middleware-stack': 4.0.1 + '@smithy/protocol-http': 5.0.1 + '@smithy/types': 4.1.0 + '@smithy/util-stream': 4.0.1 tslib: 2.6.2 '@smithy/smithy-client@4.1.6': @@ -24913,7 +25872,7 @@ snapshots: dependencies: tslib: 2.6.2 - '@smithy/types@4.0.0': + '@smithy/types@3.3.0': dependencies: tslib: 2.6.2 @@ -24927,12 +25886,6 @@ snapshots: '@smithy/types': 2.11.0 tslib: 2.6.2 - '@smithy/url-parser@4.0.0': - dependencies: - '@smithy/querystring-parser': 4.0.0 - '@smithy/types': 4.0.0 - tslib: 2.6.2 - '@smithy/url-parser@4.0.1': dependencies: '@smithy/querystring-parser': 4.0.1 @@ -24993,11 +25946,11 @@ snapshots: bowser: 2.11.0 tslib: 2.6.2 - '@smithy/util-defaults-mode-browser@4.0.0': + '@smithy/util-defaults-mode-browser@4.0.2': dependencies: - '@smithy/property-provider': 4.0.0 - '@smithy/smithy-client': 4.0.0 - '@smithy/types': 4.0.0 + '@smithy/property-provider': 4.0.1 + '@smithy/smithy-client': 4.1.1 + '@smithy/types': 4.1.0 bowser: 2.11.0 tslib: 2.6.2 @@ -25019,14 +25972,14 @@ snapshots: '@smithy/types': 2.11.0 tslib: 2.6.2 - '@smithy/util-defaults-mode-node@4.0.0': + '@smithy/util-defaults-mode-node@4.0.2': dependencies: - '@smithy/config-resolver': 4.0.0 - '@smithy/credential-provider-imds': 4.0.0 - '@smithy/node-config-provider': 4.0.0 - '@smithy/property-provider': 4.0.0 - '@smithy/smithy-client': 4.0.0 - '@smithy/types': 4.0.0 + 
'@smithy/config-resolver': 4.0.1 + '@smithy/credential-provider-imds': 4.0.1 + '@smithy/node-config-provider': 4.0.1 + '@smithy/property-provider': 4.0.1 + '@smithy/smithy-client': 4.1.1 + '@smithy/types': 4.1.0 tslib: 2.6.2 '@smithy/util-defaults-mode-node@4.0.7': @@ -25045,12 +25998,6 @@ snapshots: '@smithy/types': 2.11.0 tslib: 2.6.2 - '@smithy/util-endpoints@3.0.0': - dependencies: - '@smithy/node-config-provider': 4.0.0 - '@smithy/types': 4.0.0 - tslib: 2.6.2 - '@smithy/util-endpoints@3.0.1': dependencies: '@smithy/node-config-provider': 4.0.1 @@ -25070,11 +26017,6 @@ snapshots: '@smithy/types': 2.11.0 tslib: 2.6.2 - '@smithy/util-middleware@4.0.0': - dependencies: - '@smithy/types': 4.0.0 - tslib: 2.6.2 - '@smithy/util-middleware@4.0.1': dependencies: '@smithy/types': 4.1.0 @@ -25086,12 +26028,6 @@ snapshots: '@smithy/types': 2.11.0 tslib: 2.6.2 - '@smithy/util-retry@4.0.0': - dependencies: - '@smithy/service-error-classification': 4.0.0 - '@smithy/types': 4.0.0 - tslib: 2.6.2 - '@smithy/util-retry@4.0.1': dependencies: '@smithy/service-error-classification': 4.0.1 @@ -25109,11 +26045,11 @@ snapshots: '@smithy/util-utf8': 2.2.0 tslib: 2.6.2 - '@smithy/util-stream@4.0.0': + '@smithy/util-stream@4.0.1': dependencies: - '@smithy/fetch-http-handler': 5.0.0 - '@smithy/node-http-handler': 4.0.0 - '@smithy/types': 4.0.0 + '@smithy/fetch-http-handler': 5.0.1 + '@smithy/node-http-handler': 4.0.1 + '@smithy/types': 4.1.0 '@smithy/util-base64': 4.0.0 '@smithy/util-buffer-from': 4.0.0 '@smithy/util-hex-encoding': 4.0.0 @@ -25157,7 +26093,7 @@ snapshots: '@sqltools/formatter@1.2.5': {} - '@stripe/agent-toolkit@0.1.20(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(ai@3.2.22(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(react@18.2.0)(solid-js@1.7.1)(svelte@4.2.18)(vue@3.4.31(typescript@5.5.2))(zod@3.22.4))': + '@stripe/agent-toolkit@0.1.21(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(ai@3.2.22(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(react@18.2.0)(solid-js@1.7.1)(svelte@4.2.18)(vue@3.4.31(typescript@5.5.2))(zod@3.22.4))': dependencies: '@langchain/core': 0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)) ai: 3.2.22(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(react@18.2.0)(solid-js@1.7.1)(svelte@4.2.18)(vue@3.4.31(typescript@5.5.2))(zod@3.22.4) @@ -25210,7 +26146,7 @@ snapshots: '@surma/rollup-plugin-off-main-thread@2.2.3': dependencies: - ejs: 3.1.9 + ejs: 3.1.10 json5: 2.2.3 magic-string: 0.25.9 string.prototype.matchall: 4.0.10 @@ -25346,7 +26282,7 @@ snapshots: '@testing-library/dom@9.3.4': dependencies: '@babel/code-frame': 7.26.2 - '@babel/runtime': 7.26.9 + '@babel/runtime': 7.26.10 '@types/aria-query': 5.0.4 aria-query: 5.1.3 chalk: 4.1.2 @@ -25603,50 +26539,58 @@ snapshots: dependencies: '@babel/types': 7.26.0 + '@types/bcryptjs@2.4.6': {} + '@types/body-parser@1.19.5': dependencies: '@types/connect': 3.4.38 - '@types/node': 22.13.9 + '@types/node': 22.5.4 '@types/bonjour@3.5.13': dependencies: - '@types/node': 22.13.9 + '@types/node': 22.5.4 '@types/bunyan@1.8.9': dependencies: - '@types/node': 22.13.9 + '@types/node': 22.5.4 '@types/cacheable-request@6.0.3': dependencies: '@types/http-cache-semantics': 4.0.4 '@types/keyv': 3.1.4 - 
'@types/node': 22.13.9 + '@types/node': 22.5.4 '@types/responselike': 1.0.3 '@types/caseless@0.12.5': {} '@types/cli-progress@3.11.5': dependencies: - '@types/node': 22.13.9 + '@types/node': 22.5.4 '@types/connect-history-api-fallback@1.5.4': dependencies: '@types/express-serve-static-core': 4.17.43 - '@types/node': 22.13.9 + '@types/node': 22.5.4 '@types/connect@3.4.36': dependencies: - '@types/node': 22.13.9 + '@types/node': 22.5.4 '@types/connect@3.4.38': dependencies: - '@types/node': 22.13.9 + '@types/node': 22.5.4 '@types/content-disposition@0.5.8': {} + '@types/cookie-parser@1.4.7': + dependencies: + '@types/express': 4.17.21 + + '@types/cookiejar@2.1.5': {} + '@types/cors@2.8.17': dependencies: - '@types/node': 20.11.26 + '@types/node': 22.5.4 '@types/crypto-js@4.2.2': {} @@ -25791,18 +26735,22 @@ snapshots: '@types/express-serve-static-core@4.17.43': dependencies: - '@types/node': 20.12.12 + '@types/node': 22.5.4 '@types/qs': 6.9.17 '@types/range-parser': 1.2.7 '@types/send': 0.17.4 '@types/express-serve-static-core@5.0.6': dependencies: - '@types/node': 22.13.9 + '@types/node': 22.5.4 '@types/qs': 6.9.17 '@types/range-parser': 1.2.7 '@types/send': 0.17.4 + '@types/express-session@1.18.0': + dependencies: + '@types/express': 4.17.21 + '@types/express@4.17.21': dependencies: '@types/body-parser': 1.19.5 @@ -25820,7 +26768,7 @@ snapshots: '@types/glob-stream@8.0.2': dependencies: - '@types/node': 22.13.9 + '@types/node': 22.5.4 '@types/picomatch': 2.3.3 '@types/streamx': 2.9.5 @@ -25828,7 +26776,7 @@ snapshots: '@types/graceful-fs@4.1.9': dependencies: - '@types/node': 22.13.9 + '@types/node': 22.5.4 '@types/gulp@4.0.9': dependencies: @@ -25857,7 +26805,7 @@ snapshots: '@types/http-proxy@1.17.16': dependencies: - '@types/node': 22.13.9 + '@types/node': 22.5.4 '@types/istanbul-lib-coverage@2.0.6': {} @@ -25869,7 +26817,7 @@ snapshots: dependencies: '@types/istanbul-lib-report': 3.0.3 - '@types/jest@29.5.12': + '@types/jest@29.5.14': dependencies: expect: 29.7.0 pretty-format: 29.7.0 @@ -25878,7 +26826,7 @@ snapshots: '@types/jsdom@21.1.6': dependencies: - '@types/node': 20.11.26 + '@types/node': 20.12.12 '@types/tough-cookie': 4.0.5 parse5: 7.1.2 @@ -25886,11 +26834,15 @@ snapshots: '@types/json5@0.0.29': {} + '@types/jsonwebtoken@9.0.6': + dependencies: + '@types/node': 22.5.4 + '@types/katex@0.16.7': {} '@types/keyv@3.1.4': dependencies: - '@types/node': 22.13.9 + '@types/node': 22.5.4 '@types/linkify-it@5.0.0': {} @@ -25923,7 +26875,9 @@ snapshots: '@types/memcached@2.2.10': dependencies: - '@types/node': 22.13.9 + '@types/node': 22.5.4 + + '@types/methods@1.1.4': {} '@types/mime@1.3.5': {} @@ -25937,7 +26891,7 @@ snapshots: dependencies: '@aws-sdk/client-s3': 3.529.1 '@types/multer': 1.4.11 - '@types/node': 20.12.12 + '@types/node': 22.5.4 transitivePeerDependencies: - aws-crt @@ -25947,26 +26901,26 @@ snapshots: '@types/mysql@2.15.26': dependencies: - '@types/node': 22.13.9 + '@types/node': 22.5.4 '@types/node-fetch@2.6.11': dependencies: - '@types/node': 22.13.9 + '@types/node': 22.5.4 form-data: 4.0.1 '@types/node-fetch@2.6.12': dependencies: - '@types/node': 22.13.9 + '@types/node': 22.5.4 form-data: 4.0.1 '@types/node-fetch@2.6.2': dependencies: - '@types/node': 20.11.26 + '@types/node': 20.12.12 form-data: 3.0.1 '@types/node-forge@1.3.11': dependencies: - '@types/node': 22.13.9 + '@types/node': 22.5.4 '@types/node@10.14.22': {} @@ -25976,48 +26930,96 @@ snapshots: dependencies: undici-types: 5.26.5 - '@types/node@20.11.26': + '@types/node@20.12.12': dependencies: 
undici-types: 5.26.5 - '@types/node@20.12.12': + '@types/node@22.5.4': dependencies: - undici-types: 5.26.5 + undici-types: 6.19.8 - '@types/node@22.13.9': + '@types/nodemailer@6.4.15': dependencies: - undici-types: 6.20.0 + '@types/node': 22.5.4 '@types/normalize-package-data@2.4.4': {} + '@types/oauth@0.9.6': + dependencies: + '@types/node': 22.5.4 + '@types/object-hash@3.0.6': {} '@types/papaparse@5.3.15': dependencies: - '@types/node': 20.12.12 + '@types/node': 22.5.4 '@types/parse-json@4.0.2': {} + '@types/passport-auth0@1.0.9': + dependencies: + '@types/express': 4.17.21 + '@types/passport': 1.0.16 + + '@types/passport-github@1.1.12': + dependencies: + '@types/express': 4.17.21 + '@types/passport': 1.0.16 + '@types/passport-oauth2': 1.4.17 + + '@types/passport-jwt@4.0.1': + dependencies: + '@types/jsonwebtoken': 9.0.6 + '@types/passport-strategy': 0.2.38 + + '@types/passport-local@1.0.38': + dependencies: + '@types/express': 4.17.21 + '@types/passport': 1.0.16 + '@types/passport-strategy': 0.2.38 + + '@types/passport-oauth2@1.4.17': + dependencies: + '@types/express': 4.17.21 + '@types/oauth': 0.9.6 + '@types/passport': 1.0.16 + + '@types/passport-openidconnect@0.1.3': + dependencies: + '@types/express': 4.17.21 + '@types/oauth': 0.9.6 + '@types/passport': 1.0.16 + '@types/passport-strategy': 0.2.38 + + '@types/passport-strategy@0.2.38': + dependencies: + '@types/express': 4.17.21 + '@types/passport': 1.0.16 + + '@types/passport@1.0.16': + dependencies: + '@types/express': 4.17.21 + '@types/pg-pool@2.0.6': dependencies: '@types/pg': 8.11.6 '@types/pg@8.11.2': dependencies: - '@types/node': 20.11.26 + '@types/node': 20.12.12 pg-protocol: 1.6.0 pg-types: 4.0.2 '@types/pg@8.11.6': dependencies: - '@types/node': 20.12.12 - pg-protocol: 1.6.1 + '@types/node': 22.5.4 + pg-protocol: 1.7.1 pg-types: 4.0.2 '@types/pg@8.6.1': dependencies: - '@types/node': 22.13.9 - pg-protocol: 1.6.1 + '@types/node': 22.5.4 + pg-protocol: 1.7.1 pg-types: 2.2.0 '@types/phoenix@1.6.4': {} @@ -26059,17 +27061,17 @@ snapshots: '@types/request@2.48.12': dependencies: '@types/caseless': 0.12.5 - '@types/node': 22.13.9 + '@types/node': 22.5.4 '@types/tough-cookie': 4.0.5 form-data: 2.5.1 '@types/resolve@1.17.1': dependencies: - '@types/node': 22.13.9 + '@types/node': 22.5.4 '@types/responselike@1.0.3': dependencies: - '@types/node': 22.13.9 + '@types/node': 22.5.4 '@types/retry@0.12.0': {} @@ -26084,7 +27086,7 @@ snapshots: '@types/send@0.17.4': dependencies: '@types/mime': 1.3.5 - '@types/node': 22.13.9 + '@types/node': 22.5.4 '@types/serve-index@1.9.4': dependencies: @@ -26104,20 +27106,32 @@ snapshots: '@types/sockjs@0.3.36': dependencies: - '@types/node': 22.13.9 + '@types/node': 22.5.4 '@types/sqlite3@3.1.11': dependencies: - '@types/node': 22.13.9 + '@types/node': 22.5.4 '@types/stack-utils@2.0.3': {} '@types/streamx@2.9.5': dependencies: - '@types/node': 22.13.9 + '@types/node': 22.5.4 '@types/stylis@4.2.5': {} + '@types/superagent@8.1.9': + dependencies: + '@types/cookiejar': 2.1.5 + '@types/methods': 1.1.4 + '@types/node': 22.5.4 + form-data: 4.0.1 + + '@types/supertest@6.0.3': + dependencies: + '@types/methods': 1.1.4 + '@types/superagent': 8.1.9 + '@types/swagger-jsdoc@6.0.4': {} '@types/swagger-ui-express@4.1.6': @@ -26127,11 +27141,11 @@ snapshots: '@types/tedious@4.0.14': dependencies: - '@types/node': 22.13.9 + '@types/node': 22.5.4 '@types/testing-library__jest-dom@5.14.9': dependencies: - '@types/jest': 29.5.12 + '@types/jest': 29.5.14 '@types/tough-cookie@4.0.5': {} @@ -26170,7 +27184,7 @@ 
snapshots: '@types/vinyl@2.0.11': dependencies: '@types/expect': 1.20.4 - '@types/node': 22.13.9 + '@types/node': 22.5.4 '@types/webidl-conversions@7.0.3': {} @@ -26180,7 +27194,7 @@ snapshots: '@types/ws@8.5.10': dependencies: - '@types/node': 20.11.26 + '@types/node': 20.12.12 '@types/yargs-parser@21.0.3': {} @@ -26194,7 +27208,7 @@ snapshots: '@types/yauzl@2.10.3': dependencies: - '@types/node': 22.13.9 + '@types/node': 22.5.4 optional: true '@typescript-eslint/eslint-plugin@5.62.0(@typescript-eslint/parser@5.62.0(eslint@8.57.0)(typescript@5.5.2))(eslint@8.57.0)(typescript@5.5.2)': @@ -26311,46 +27325,46 @@ snapshots: '@typescript-eslint/types': 7.13.1 eslint-visitor-keys: 3.4.3 - '@uiw/codemirror-extensions-basic-setup@4.21.24(@codemirror/autocomplete@6.14.0(@codemirror/language@6.10.1)(@codemirror/state@6.4.1)(@codemirror/view@6.25.1)(@lezer/common@1.2.1))(@codemirror/commands@6.3.3)(@codemirror/language@6.10.1)(@codemirror/lint@6.5.0)(@codemirror/search@6.5.6)(@codemirror/state@6.4.1)(@codemirror/view@6.25.1)': + '@uiw/codemirror-extensions-basic-setup@4.21.24(@codemirror/autocomplete@6.14.0(@codemirror/language@6.10.1)(@codemirror/state@6.4.1)(@codemirror/view@6.26.3)(@lezer/common@1.2.1))(@codemirror/commands@6.3.3)(@codemirror/language@6.10.1)(@codemirror/lint@6.5.0)(@codemirror/search@6.5.6)(@codemirror/state@6.4.1)(@codemirror/view@6.26.3)': dependencies: - '@codemirror/autocomplete': 6.14.0(@codemirror/language@6.10.1)(@codemirror/state@6.4.1)(@codemirror/view@6.25.1)(@lezer/common@1.2.1) + '@codemirror/autocomplete': 6.14.0(@codemirror/language@6.10.1)(@codemirror/state@6.4.1)(@codemirror/view@6.26.3)(@lezer/common@1.2.1) '@codemirror/commands': 6.3.3 '@codemirror/language': 6.10.1 '@codemirror/lint': 6.5.0 '@codemirror/search': 6.5.6 '@codemirror/state': 6.4.1 - '@codemirror/view': 6.25.1 + '@codemirror/view': 6.26.3 - '@uiw/codemirror-theme-sublime@4.21.24(@codemirror/language@6.10.1)(@codemirror/state@6.4.1)(@codemirror/view@6.25.1)': + '@uiw/codemirror-theme-sublime@4.21.24(@codemirror/language@6.10.1)(@codemirror/state@6.4.1)(@codemirror/view@6.26.3)': dependencies: - '@uiw/codemirror-themes': 4.21.24(@codemirror/language@6.10.1)(@codemirror/state@6.4.1)(@codemirror/view@6.25.1) + '@uiw/codemirror-themes': 4.21.24(@codemirror/language@6.10.1)(@codemirror/state@6.4.1)(@codemirror/view@6.26.3) transitivePeerDependencies: - '@codemirror/language' - '@codemirror/state' - '@codemirror/view' - '@uiw/codemirror-theme-vscode@4.21.24(@codemirror/language@6.10.1)(@codemirror/state@6.4.1)(@codemirror/view@6.25.1)': + '@uiw/codemirror-theme-vscode@4.21.24(@codemirror/language@6.10.1)(@codemirror/state@6.4.1)(@codemirror/view@6.26.3)': dependencies: - '@uiw/codemirror-themes': 4.21.24(@codemirror/language@6.10.1)(@codemirror/state@6.4.1)(@codemirror/view@6.25.1) + '@uiw/codemirror-themes': 4.21.24(@codemirror/language@6.10.1)(@codemirror/state@6.4.1)(@codemirror/view@6.26.3) transitivePeerDependencies: - '@codemirror/language' - '@codemirror/state' - '@codemirror/view' - '@uiw/codemirror-themes@4.21.24(@codemirror/language@6.10.1)(@codemirror/state@6.4.1)(@codemirror/view@6.25.1)': + '@uiw/codemirror-themes@4.21.24(@codemirror/language@6.10.1)(@codemirror/state@6.4.1)(@codemirror/view@6.26.3)': dependencies: '@codemirror/language': 6.10.1 '@codemirror/state': 6.4.1 - '@codemirror/view': 6.25.1 + '@codemirror/view': 6.26.3 - 
'@uiw/react-codemirror@4.21.24(@babel/runtime@7.26.9)(@codemirror/autocomplete@6.14.0(@codemirror/language@6.10.1)(@codemirror/state@6.4.1)(@codemirror/view@6.25.1)(@lezer/common@1.2.1))(@codemirror/language@6.10.1)(@codemirror/lint@6.5.0)(@codemirror/search@6.5.6)(@codemirror/state@6.4.1)(@codemirror/theme-one-dark@6.1.2)(@codemirror/view@6.25.1)(codemirror@6.0.1(@lezer/common@1.2.1))(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': + '@uiw/react-codemirror@4.21.24(@babel/runtime@7.26.10)(@codemirror/autocomplete@6.14.0(@codemirror/language@6.10.1)(@codemirror/state@6.4.1)(@codemirror/view@6.26.3)(@lezer/common@1.2.1))(@codemirror/language@6.10.1)(@codemirror/lint@6.5.0)(@codemirror/search@6.5.6)(@codemirror/state@6.4.1)(@codemirror/theme-one-dark@6.1.2)(@codemirror/view@6.26.3)(codemirror@6.0.1(@lezer/common@1.2.1))(react-dom@18.2.0(react@18.2.0))(react@18.2.0)': dependencies: - '@babel/runtime': 7.26.9 + '@babel/runtime': 7.26.10 '@codemirror/commands': 6.3.3 '@codemirror/state': 6.4.1 '@codemirror/theme-one-dark': 6.1.2 - '@codemirror/view': 6.25.1 - '@uiw/codemirror-extensions-basic-setup': 4.21.24(@codemirror/autocomplete@6.14.0(@codemirror/language@6.10.1)(@codemirror/state@6.4.1)(@codemirror/view@6.25.1)(@lezer/common@1.2.1))(@codemirror/commands@6.3.3)(@codemirror/language@6.10.1)(@codemirror/lint@6.5.0)(@codemirror/search@6.5.6)(@codemirror/state@6.4.1)(@codemirror/view@6.25.1) + '@codemirror/view': 6.26.3 + '@uiw/codemirror-extensions-basic-setup': 4.21.24(@codemirror/autocomplete@6.14.0(@codemirror/language@6.10.1)(@codemirror/state@6.4.1)(@codemirror/view@6.26.3)(@lezer/common@1.2.1))(@codemirror/commands@6.3.3)(@codemirror/language@6.10.1)(@codemirror/lint@6.5.0)(@codemirror/search@6.5.6)(@codemirror/state@6.4.1)(@codemirror/view@6.26.3) codemirror: 6.0.1(@lezer/common@1.2.1) react: 18.2.0 react-dom: 18.2.0(react@18.2.0) @@ -26370,25 +27384,25 @@ snapshots: '@upstash/vector@1.1.5': {} - '@vitejs/plugin-react@3.1.0(vite@4.5.2(@types/node@22.13.9)(sass@1.71.1)(terser@5.29.1))': + '@vitejs/plugin-react@3.1.0(vite@4.5.2(@types/node@22.5.4)(sass@1.71.1)(terser@5.29.1))': dependencies: '@babel/core': 7.24.0 '@babel/plugin-transform-react-jsx-self': 7.23.3(@babel/core@7.24.0) '@babel/plugin-transform-react-jsx-source': 7.23.3(@babel/core@7.24.0) magic-string: 0.27.0 react-refresh: 0.14.0 - vite: 4.5.2(@types/node@22.13.9)(sass@1.71.1)(terser@5.29.1) + vite: 4.5.2(@types/node@22.5.4)(sass@1.71.1)(terser@5.29.1) transitivePeerDependencies: - supports-color - '@vitejs/plugin-react@4.2.1(vite@5.1.6(@types/node@22.13.9)(sass@1.71.1)(terser@5.29.1))': + '@vitejs/plugin-react@4.2.1(vite@5.1.6(@types/node@22.5.4)(sass@1.71.1)(terser@5.29.1))': dependencies: '@babel/core': 7.24.0 '@babel/plugin-transform-react-jsx-self': 7.23.3(@babel/core@7.24.0) '@babel/plugin-transform-react-jsx-source': 7.23.3(@babel/core@7.24.0) '@types/babel__core': 7.20.5 react-refresh: 0.14.0 - vite: 5.1.6(@types/node@22.13.9)(sass@1.71.1)(terser@5.29.1) + vite: 5.1.6(@types/node@22.5.4)(sass@1.71.1)(terser@5.29.1) transitivePeerDependencies: - supports-color @@ -27137,6 +28151,19 @@ snapshots: transitivePeerDependencies: - supports-color + babel-jest@29.7.0(@babel/core@7.24.0): + dependencies: + '@babel/core': 7.24.0 + '@jest/transform': 29.7.0 + '@types/babel__core': 7.20.5 + babel-plugin-istanbul: 6.1.1 + babel-preset-jest: 29.6.3(@babel/core@7.24.0) + chalk: 4.1.2 + graceful-fs: 4.2.11 + slash: 3.0.0 + transitivePeerDependencies: + - supports-color + 
babel-loader@8.3.0(@babel/core@7.24.0)(webpack@5.90.3(@swc/core@1.4.6)): dependencies: '@babel/core': 7.24.0 @@ -27167,9 +28194,16 @@ snapshots: '@types/babel__core': 7.20.5 '@types/babel__traverse': 7.20.5 + babel-plugin-jest-hoist@29.6.3: + dependencies: + '@babel/template': 7.25.9 + '@babel/types': 7.26.0 + '@types/babel__core': 7.20.5 + '@types/babel__traverse': 7.20.5 + babel-plugin-macros@3.1.0: dependencies: - '@babel/runtime': 7.26.9 + '@babel/runtime': 7.26.10 cosmiconfig: 7.1.0 resolve: 1.22.8 @@ -27207,7 +28241,7 @@ snapshots: dependencies: '@babel/core': 7.24.0 '@babel/helper-define-polyfill-provider': 0.5.0(@babel/core@7.24.0) - core-js-compat: 3.37.0 + core-js-compat: 3.36.0 transitivePeerDependencies: - supports-color @@ -27249,6 +28283,12 @@ snapshots: babel-plugin-jest-hoist: 27.5.1 babel-preset-current-node-syntax: 1.0.1(@babel/core@7.24.0) + babel-preset-jest@29.6.3(@babel/core@7.24.0): + dependencies: + '@babel/core': 7.24.0 + babel-plugin-jest-hoist: 29.6.3 + babel-preset-current-node-syntax: 1.0.1(@babel/core@7.24.0) + babel-preset-react-app@10.0.1: dependencies: '@babel/core': 7.24.0 @@ -27265,7 +28305,7 @@ snapshots: '@babel/preset-env': 7.24.5(@babel/core@7.24.0) '@babel/preset-react': 7.25.9(@babel/core@7.24.0) '@babel/preset-typescript': 7.18.6(@babel/core@7.24.0) - '@babel/runtime': 7.26.9 + '@babel/runtime': 7.26.10 babel-plugin-macros: 3.1.0 babel-plugin-transform-react-remove-prop-types: 0.4.24 transitivePeerDependencies: @@ -27362,6 +28402,8 @@ snapshots: base64-js@1.5.1: {} + base64url@3.0.1: {} + base@0.11.2: dependencies: cache-base: 1.0.1 @@ -27384,6 +28426,8 @@ snapshots: dependencies: tweetnacl: 0.14.5 + bcryptjs@2.4.3: {} + before-after-hook@2.2.3: {} bfj@7.1.0: @@ -27498,6 +28542,10 @@ snapshots: node-releases: 2.0.14 update-browserslist-db: 1.0.13(browserslist@4.23.0) + bs-logger@0.2.6: + dependencies: + fast-json-stable-stringify: 2.1.0 + bser@2.1.1: dependencies: node-int64: 0.4.0 @@ -27563,7 +28611,7 @@ snapshots: transitivePeerDependencies: - supports-color - bullmq@5.43.0: + bullmq@5.45.2: dependencies: cron-parser: 4.9.0 ioredis: 5.4.2 @@ -27660,6 +28708,10 @@ snapshots: union-value: 1.0.1 unset-value: 2.0.1 + cache-manager@6.4.2: + dependencies: + keyv: 5.3.2 + cacheable-lookup@5.0.4: {} cacheable-request@7.0.4: @@ -27834,7 +28886,7 @@ snapshots: chownr@2.0.0: {} - chromadb@1.10.0(@google/generative-ai@0.24.0)(cohere-ai@7.10.0(encoding@0.1.13))(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)): + chromadb@1.10.3(@google/generative-ai@0.24.0)(cohere-ai@7.10.0(encoding@0.1.13))(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)): dependencies: cliui: 8.0.1 isomorphic-fetch: 3.0.0(encoding@0.1.13) @@ -28185,6 +29237,23 @@ snapshots: connect-history-api-fallback@2.0.0: {} + connect-pg-simple@10.0.0: + dependencies: + pg: 8.13.3 + transitivePeerDependencies: + - pg-native + + connect-redis@8.0.2(express-session@1.18.1): + dependencies: + express-session: 1.18.1 + + connect-sqlite3@0.9.15: + dependencies: + sqlite3: 5.1.7 + transitivePeerDependencies: + - bluebird + - supports-color + console-control-strings@1.1.0: {} content-disposition@0.5.3: @@ -28205,16 +29274,29 @@ snapshots: convert-source-map@2.0.0: {} + cookie-parser@1.4.6: + dependencies: + cookie: 0.4.1 + cookie-signature: 1.0.6 + cookie-signature@1.0.6: {} + cookie-signature@1.0.7: {} + cookie-signature@1.2.2: {} cookie@0.4.0: {} + cookie@0.4.1: {} + cookie@0.5.0: {} 
cookie@0.7.1: {} + cookie@0.7.2: {} + + cookiejar@2.1.4: {} + copy-descriptor@0.1.1: {} copy-props@2.0.5: @@ -28289,6 +29371,21 @@ snapshots: nan: 2.22.2 optional: true + create-jest@29.7.0(@types/node@22.5.4)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)): + dependencies: + '@jest/types': 29.6.3 + chalk: 4.1.2 + exit: 0.1.2 + graceful-fs: 4.2.11 + jest-config: 29.7.0(@types/node@22.5.4)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)) + jest-util: 29.7.0 + prompts: 2.4.2 + transitivePeerDependencies: + - '@types/node' + - babel-plugin-macros + - supports-color + - ts-node + create-require@1.1.1: {} crelt@1.0.6: {} @@ -28497,6 +29594,10 @@ snapshots: csstype@3.1.3: {} + csv-parser@3.0.0: + dependencies: + minimist: 1.2.8 + cypress@13.13.0: dependencies: '@cypress/request': 3.0.1 @@ -28542,6 +29643,10 @@ snapshots: untildify: 4.0.0 yauzl: 2.10.0 + d3-array@3.2.4: + dependencies: + internmap: 2.0.3 + d3-color@3.1.0: {} d3-dispatch@3.0.1: {} @@ -28559,12 +29664,36 @@ snapshots: d3-ease@3.0.1: {} + d3-format@3.1.0: {} + d3-interpolate@3.0.1: dependencies: d3-color: 3.1.0 + d3-path@3.1.0: {} + + d3-scale@4.0.2: + dependencies: + d3-array: 3.2.4 + d3-format: 3.1.0 + d3-interpolate: 3.0.1 + d3-time: 3.1.0 + d3-time-format: 4.1.0 + d3-selection@3.0.0: {} + d3-shape@3.2.0: + dependencies: + d3-path: 3.1.0 + + d3-time-format@4.1.0: + dependencies: + d3-time: 3.1.0 + + d3-time@3.1.0: + dependencies: + d3-array: 3.2.4 + d3-timer@3.0.1: {} d3-transition@3.0.1(d3-selection@3.0.0): @@ -28621,7 +29750,7 @@ snapshots: date-fns@2.30.0: dependencies: - '@babel/runtime': 7.26.9 + '@babel/runtime': 7.26.10 dateformat@4.6.3: {} @@ -28673,6 +29802,8 @@ snapshots: decamelize@1.2.0: {} + decimal.js-light@2.5.1: {} + decimal.js@10.4.3: {} decode-named-character-reference@1.0.2: @@ -28694,6 +29825,10 @@ snapshots: dedent@0.7.0: {} + dedent@1.5.3(babel-plugin-macros@3.1.0): + optionalDependencies: + babel-plugin-macros: 3.1.0 + deep-eql@4.0.0: dependencies: type-detect: 4.0.8 @@ -28812,6 +29947,8 @@ snapshots: dependencies: repeating: 2.0.1 + detect-libc@2.0.2: {} + detect-libc@2.0.3: {} detect-newline@3.1.0: {} @@ -28899,7 +30036,7 @@ snapshots: dom-helpers@5.2.1: dependencies: - '@babel/runtime': 7.26.9 + '@babel/runtime': 7.26.10 csstype: 3.1.3 dom-serializer@0.2.2: @@ -29037,6 +30174,8 @@ snapshots: emittery@0.10.2: {} + emittery@0.13.1: {} + emittery@0.8.1: {} emoji-regex@8.0.0: {} @@ -29309,7 +30448,7 @@ snapshots: dependencies: eslint: 8.57.0 - eslint-config-react-app@7.0.1(@babel/plugin-syntax-flow@7.23.3(@babel/core@7.24.0))(@babel/plugin-transform-react-jsx@7.25.9(@babel/core@7.24.0))(eslint@8.57.0)(jest@27.5.1(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2))(utf-8-validate@6.0.4))(typescript@5.5.2): + eslint-config-react-app@7.0.1(@babel/plugin-syntax-flow@7.23.3(@babel/core@7.24.0))(@babel/plugin-transform-react-jsx@7.25.9(@babel/core@7.24.0))(eslint@8.57.0)(jest@27.5.1(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2))(utf-8-validate@6.0.4))(typescript@5.5.2): dependencies: '@babel/core': 7.24.0 '@babel/eslint-parser': 7.23.10(@babel/core@7.24.0)(eslint@8.57.0) @@ -29321,7 +30460,7 @@ snapshots: eslint: 8.57.0 eslint-plugin-flowtype: 
8.0.3(@babel/plugin-syntax-flow@7.23.3(@babel/core@7.24.0))(@babel/plugin-transform-react-jsx@7.25.9(@babel/core@7.24.0))(eslint@8.57.0) eslint-plugin-import: 2.29.1(@typescript-eslint/parser@5.62.0(eslint@8.57.0)(typescript@5.5.2))(eslint@8.57.0) - eslint-plugin-jest: 25.7.0(@typescript-eslint/eslint-plugin@5.62.0(@typescript-eslint/parser@5.62.0(eslint@8.57.0)(typescript@5.5.2))(eslint@8.57.0)(typescript@5.5.2))(eslint@8.57.0)(jest@27.5.1(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2))(utf-8-validate@6.0.4))(typescript@5.5.2) + eslint-plugin-jest: 25.7.0(@typescript-eslint/eslint-plugin@5.62.0(@typescript-eslint/parser@5.62.0(eslint@8.57.0)(typescript@5.5.2))(eslint@8.57.0)(typescript@5.5.2))(eslint@8.57.0)(jest@27.5.1(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2))(utf-8-validate@6.0.4))(typescript@5.5.2) eslint-plugin-jsx-a11y: 6.8.0(eslint@8.57.0) eslint-plugin-react: 7.34.0(eslint@8.57.0) eslint-plugin-react-hooks: 4.6.0(eslint@8.57.0) @@ -29389,20 +30528,20 @@ snapshots: - eslint-import-resolver-webpack - supports-color - eslint-plugin-jest@25.7.0(@typescript-eslint/eslint-plugin@5.62.0(@typescript-eslint/parser@5.62.0(eslint@8.57.0)(typescript@5.5.2))(eslint@8.57.0)(typescript@5.5.2))(eslint@8.57.0)(jest@27.5.1(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2))(utf-8-validate@6.0.4))(typescript@5.5.2): + eslint-plugin-jest@25.7.0(@typescript-eslint/eslint-plugin@5.62.0(@typescript-eslint/parser@5.62.0(eslint@8.57.0)(typescript@5.5.2))(eslint@8.57.0)(typescript@5.5.2))(eslint@8.57.0)(jest@27.5.1(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2))(utf-8-validate@6.0.4))(typescript@5.5.2): dependencies: '@typescript-eslint/experimental-utils': 5.62.0(eslint@8.57.0)(typescript@5.5.2) eslint: 8.57.0 optionalDependencies: '@typescript-eslint/eslint-plugin': 5.62.0(@typescript-eslint/parser@5.62.0(eslint@8.57.0)(typescript@5.5.2))(eslint@8.57.0)(typescript@5.5.2) - jest: 27.5.1(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2))(utf-8-validate@6.0.4) + jest: 27.5.1(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2))(utf-8-validate@6.0.4) transitivePeerDependencies: - supports-color - typescript eslint-plugin-jsx-a11y@6.8.0(eslint@8.57.0): dependencies: - '@babel/runtime': 7.26.9 + '@babel/runtime': 7.26.10 aria-query: 5.3.0 array-includes: 3.1.7 array.prototype.flatmap: 1.3.2 @@ -29724,6 +30863,13 @@ snapshots: dependencies: basic-auth: 2.0.1 + express-mysql-session@3.0.3: + dependencies: + debug: 4.3.4(supports-color@8.1.1) + mysql2: 3.10.2 + transitivePeerDependencies: + - supports-color + express-rate-limit@6.11.2(express@4.18.3): dependencies: express: 4.18.3 @@ -29732,6 +30878,19 @@ snapshots: dependencies: express: 5.0.1 + express-session@1.18.1: + dependencies: + cookie: 0.7.2 + cookie-signature: 1.0.7 + debug: 2.6.9 + depd: 2.0.0 + on-headers: 1.0.2 + parseurl: 1.3.3 + safe-buffer: 5.2.1 + uid-safe: 2.1.5 + transitivePeerDependencies: + - supports-color + express@4.17.1: dependencies: accepts: 1.3.8 @@ -29839,6 +30998,42 @@ snapshots: transitivePeerDependencies: - supports-color + express@4.21.2: + dependencies: + accepts: 1.3.8 + array-flatten: 1.1.1 + 
body-parser: 2.0.2 + content-disposition: 0.5.4 + content-type: 1.0.5 + cookie: 0.7.1 + cookie-signature: 1.0.6 + debug: 2.6.9 + depd: 2.0.0 + encodeurl: 2.0.0 + escape-html: 1.0.3 + etag: 1.8.1 + finalhandler: 1.3.1 + fresh: 0.5.2 + http-errors: 2.0.0 + merge-descriptors: 1.0.3 + methods: 1.1.2 + on-finished: 2.4.1 + parseurl: 1.3.3 + path-to-regexp: 0.1.12 + proxy-addr: 2.0.7 + qs: 6.13.0 + range-parser: 1.2.1 + safe-buffer: 5.2.1 + send: 0.19.0 + serve-static: 1.16.2 + setprototypeof: 1.2.0 + statuses: 2.0.1 + type-is: 1.6.18 + utils-merge: 1.0.1 + vary: 1.1.2 + transitivePeerDependencies: + - supports-color + express@5.0.1: dependencies: accepts: 2.0.0 @@ -29941,6 +31136,8 @@ snapshots: fast-diff@1.3.0: {} + fast-equals@5.0.1: {} + fast-fifo@1.3.2: {} fast-glob@3.3.2: @@ -29963,6 +31160,8 @@ snapshots: dependencies: fastest-levenshtein: 1.0.16 + fast-safe-stringify@2.1.1: {} + fast-xml-parser@4.2.5: dependencies: strnum: 1.0.5 @@ -30044,6 +31243,10 @@ snapshots: schema-utils: 3.3.0 webpack: 5.90.3(@swc/core@1.4.6) + file-stream-rotator@0.6.1: + dependencies: + moment: 2.30.1 + file-type@16.5.4: dependencies: readable-web-to-node-stream: 3.0.2 @@ -30142,7 +31345,7 @@ snapshots: find-yarn-workspace-root2@1.2.16: dependencies: - micromatch: 4.0.5 + micromatch: 4.0.8 pkg-dir: 4.2.0 find-yarn-workspace-root@2.0.0: @@ -30193,9 +31396,9 @@ snapshots: flatted@3.3.1: {} - flowise-embed-react@3.0.3(@types/node@22.13.9)(flowise-embed@3.0.3)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)(sass@1.71.1)(terser@5.29.1)(typescript@5.5.2): + flowise-embed-react@3.0.3(@types/node@22.5.4)(flowise-embed@3.0.3)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)(sass@1.71.1)(terser@5.29.1)(typescript@5.5.2): dependencies: - '@ladle/react': 2.5.1(@types/node@22.13.9)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)(sass@1.71.1)(terser@5.29.1)(typescript@5.5.2) + '@ladle/react': 2.5.1(@types/node@22.5.4)(react-dom@18.2.0(react@18.2.0))(react@18.2.0)(sass@1.71.1)(terser@5.29.1)(typescript@5.5.2) flowise-embed: 3.0.3 react: 18.2.0 transitivePeerDependencies: @@ -30359,6 +31562,12 @@ snapshots: dependencies: fetch-blob: 3.2.0 + formidable@3.5.4: + dependencies: + '@paralleldrive/cuid2': 2.2.2 + dezalgo: 1.0.4 + once: 1.4.0 + formik@2.4.5(react@18.2.0): dependencies: '@types/hoist-non-react-statics': 3.3.5 @@ -30814,7 +32023,7 @@ snapshots: groq-sdk@0.5.0(encoding@0.1.13): dependencies: '@types/node': 18.19.23 - '@types/node-fetch': 2.6.11 + '@types/node-fetch': 2.6.12 abort-controller: 3.0.0 agentkeepalive: 4.5.0 form-data-encoder: 1.7.2 @@ -30885,6 +32094,15 @@ snapshots: handle-thing@2.0.1: {} + handlebars@4.7.8: + dependencies: + minimist: 1.2.8 + neo-async: 2.6.2 + source-map: 0.6.1 + wordwrap: 1.0.0 + optionalDependencies: + uglify-js: 3.19.3 + harmony-reflect@1.6.2: {} has-ansi@2.0.0: @@ -31327,6 +32545,8 @@ snapshots: immediate@3.0.6: {} + immer@10.1.1: {} + immer@9.0.21: {} immutable@4.3.5: {} @@ -31404,6 +32624,8 @@ snapshots: hasown: 2.0.2 side-channel: 1.0.6 + internmap@2.0.3: {} + interpret@1.4.0: {} invariant@2.2.4: @@ -31740,6 +32962,16 @@ snapshots: transitivePeerDependencies: - supports-color + istanbul-lib-instrument@6.0.3: + dependencies: + '@babel/core': 7.24.0 + '@babel/parser': 7.26.2 + '@istanbuljs/schema': 0.1.3 + istanbul-lib-coverage: 3.2.2 + semver: 7.7.1 + transitivePeerDependencies: + - supports-color + istanbul-lib-report@3.0.1: dependencies: istanbul-lib-coverage: 3.2.2 @@ -31788,12 +33020,18 @@ snapshots: execa: 5.1.1 throat: 6.0.2 + jest-changed-files@29.7.0: + dependencies: + execa: 5.1.1 
+ jest-util: 29.7.0 + p-limit: 3.1.0 + jest-circus@27.5.1: dependencies: '@jest/environment': 27.5.1 '@jest/test-result': 27.5.1 '@jest/types': 27.5.1 - '@types/node': 22.13.9 + '@types/node': 22.5.4 chalk: 4.1.2 co: 4.6.0 dedent: 0.7.0 @@ -31812,16 +33050,42 @@ snapshots: transitivePeerDependencies: - supports-color - jest-cli@27.5.1(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2))(utf-8-validate@6.0.4): + jest-circus@29.7.0(babel-plugin-macros@3.1.0): dependencies: - '@jest/core': 27.5.1(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2))(utf-8-validate@6.0.4) + '@jest/environment': 29.7.0 + '@jest/expect': 29.7.0 + '@jest/test-result': 29.7.0 + '@jest/types': 29.6.3 + '@types/node': 22.5.4 + chalk: 4.1.2 + co: 4.6.0 + dedent: 1.5.3(babel-plugin-macros@3.1.0) + is-generator-fn: 2.1.0 + jest-each: 29.7.0 + jest-matcher-utils: 29.7.0 + jest-message-util: 29.7.0 + jest-runtime: 29.7.0 + jest-snapshot: 29.7.0 + jest-util: 29.7.0 + p-limit: 3.1.0 + pretty-format: 29.7.0 + pure-rand: 6.1.0 + slash: 3.0.0 + stack-utils: 2.0.6 + transitivePeerDependencies: + - babel-plugin-macros + - supports-color + + jest-cli@27.5.1(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2))(utf-8-validate@6.0.4): + dependencies: + '@jest/core': 27.5.1(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2))(utf-8-validate@6.0.4) '@jest/test-result': 27.5.1 '@jest/types': 27.5.1 chalk: 4.1.2 exit: 0.1.2 graceful-fs: 4.2.11 import-local: 3.1.0 - jest-config: 27.5.1(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2))(utf-8-validate@6.0.4) + jest-config: 27.5.1(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2))(utf-8-validate@6.0.4) jest-util: 27.5.1 jest-validate: 27.5.1 prompts: 2.4.2 @@ -31833,7 +33097,26 @@ snapshots: - ts-node - utf-8-validate - jest-config@27.5.1(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2))(utf-8-validate@6.0.4): + jest-cli@29.7.0(@types/node@22.5.4)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)): + dependencies: + '@jest/core': 29.7.0(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)) + '@jest/test-result': 29.7.0 + '@jest/types': 29.6.3 + chalk: 4.1.2 + create-jest: 29.7.0(@types/node@22.5.4)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)) + exit: 0.1.2 + import-local: 3.1.0 + jest-config: 29.7.0(@types/node@22.5.4)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)) + jest-util: 29.7.0 + jest-validate: 29.7.0 + yargs: 17.7.2 + transitivePeerDependencies: + - '@types/node' + - babel-plugin-macros + - supports-color + - ts-node + + jest-config@27.5.1(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2))(utf-8-validate@6.0.4): dependencies: '@babel/core': 7.24.0 '@jest/test-sequencer': 27.5.1 @@ -31854,19 +33137,50 @@ snapshots: jest-runner: 27.5.1(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(utf-8-validate@6.0.4) jest-util: 27.5.1 jest-validate: 27.5.1 
- micromatch: 4.0.5 + micromatch: 4.0.8 parse-json: 5.2.0 pretty-format: 27.5.1 slash: 3.0.0 strip-json-comments: 3.1.1 optionalDependencies: - ts-node: 10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2) + ts-node: 10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2) transitivePeerDependencies: - bufferutil - canvas - supports-color - utf-8-validate + jest-config@29.7.0(@types/node@22.5.4)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)): + dependencies: + '@babel/core': 7.24.0 + '@jest/test-sequencer': 29.7.0 + '@jest/types': 29.6.3 + babel-jest: 29.7.0(@babel/core@7.24.0) + chalk: 4.1.2 + ci-info: 3.9.0 + deepmerge: 4.3.1 + glob: 7.2.3 + graceful-fs: 4.2.11 + jest-circus: 29.7.0(babel-plugin-macros@3.1.0) + jest-environment-node: 29.7.0 + jest-get-type: 29.6.3 + jest-regex-util: 29.6.3 + jest-resolve: 29.7.0 + jest-runner: 29.7.0 + jest-util: 29.7.0 + jest-validate: 29.7.0 + micromatch: 4.0.8 + parse-json: 5.2.0 + pretty-format: 29.7.0 + slash: 3.0.0 + strip-json-comments: 3.1.1 + optionalDependencies: + '@types/node': 22.5.4 + ts-node: 10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2) + transitivePeerDependencies: + - babel-plugin-macros + - supports-color + jest-diff@27.5.1: dependencies: chalk: 4.1.2 @@ -31885,6 +33199,10 @@ snapshots: dependencies: detect-newline: 3.1.0 + jest-docblock@29.7.0: + dependencies: + detect-newline: 3.1.0 + jest-each@27.5.1: dependencies: '@jest/types': 27.5.1 @@ -31893,12 +33211,20 @@ snapshots: jest-util: 27.5.1 pretty-format: 27.5.1 + jest-each@29.7.0: + dependencies: + '@jest/types': 29.6.3 + chalk: 4.1.2 + jest-get-type: 29.6.3 + jest-util: 29.7.0 + pretty-format: 29.7.0 + jest-environment-jsdom@27.5.1(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(utf-8-validate@6.0.4): dependencies: '@jest/environment': 27.5.1 '@jest/fake-timers': 27.5.1 '@jest/types': 27.5.1 - '@types/node': 22.13.9 + '@types/node': 22.5.4 jest-mock: 27.5.1 jest-util: 27.5.1 jsdom: 16.7.0(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(utf-8-validate@6.0.4) @@ -31913,10 +33239,19 @@ snapshots: '@jest/environment': 27.5.1 '@jest/fake-timers': 27.5.1 '@jest/types': 27.5.1 - '@types/node': 22.13.9 + '@types/node': 22.5.4 jest-mock: 27.5.1 jest-util: 27.5.1 + jest-environment-node@29.7.0: + dependencies: + '@jest/environment': 29.7.0 + '@jest/fake-timers': 29.7.0 + '@jest/types': 29.6.3 + '@types/node': 22.5.4 + jest-mock: 29.7.0 + jest-util: 29.7.0 + jest-get-type@27.5.1: {} jest-get-type@29.6.3: {} @@ -31925,7 +33260,7 @@ snapshots: dependencies: '@jest/types': 27.5.1 '@types/graceful-fs': 4.1.9 - '@types/node': 22.13.9 + '@types/node': 22.5.4 anymatch: 3.1.3 fb-watchman: 2.0.2 graceful-fs: 4.2.11 @@ -31938,13 +33273,29 @@ snapshots: optionalDependencies: fsevents: 2.3.3 + jest-haste-map@29.7.0: + dependencies: + '@jest/types': 29.6.3 + '@types/graceful-fs': 4.1.9 + '@types/node': 22.5.4 + anymatch: 3.1.3 + fb-watchman: 2.0.2 + graceful-fs: 4.2.11 + jest-regex-util: 29.6.3 + jest-util: 29.7.0 + jest-worker: 29.7.0 + micromatch: 4.0.8 + walker: 1.0.8 + optionalDependencies: + fsevents: 2.3.3 + jest-jasmine2@27.5.1: dependencies: '@jest/environment': 27.5.1 '@jest/source-map': 27.5.1 '@jest/test-result': 27.5.1 '@jest/types': 27.5.1 - '@types/node': 22.13.9 + '@types/node': 22.5.4 chalk: 4.1.2 co: 4.6.0 expect: 27.5.1 @@ -31965,6 +33316,11 @@ snapshots: jest-get-type: 27.5.1 pretty-format: 27.5.1 + jest-leak-detector@29.7.0: + dependencies: + jest-get-type: 29.6.3 + pretty-format: 29.7.0 + 
jest-matcher-utils@27.5.1: dependencies: chalk: 4.1.2 @@ -31986,7 +33342,7 @@ snapshots: '@types/stack-utils': 2.0.3 chalk: 4.1.2 graceful-fs: 4.2.11 - micromatch: 4.0.5 + micromatch: 4.0.8 pretty-format: 27.5.1 slash: 3.0.0 stack-utils: 2.0.6 @@ -31998,7 +33354,7 @@ snapshots: '@types/stack-utils': 2.0.3 chalk: 4.1.2 graceful-fs: 4.2.11 - micromatch: 4.0.5 + micromatch: 4.0.8 pretty-format: 28.1.3 slash: 3.0.0 stack-utils: 2.0.6 @@ -32010,7 +33366,7 @@ snapshots: '@types/stack-utils': 2.0.3 chalk: 4.1.2 graceful-fs: 4.2.11 - micromatch: 4.0.5 + micromatch: 4.0.8 pretty-format: 29.7.0 slash: 3.0.0 stack-utils: 2.0.6 @@ -32018,16 +33374,28 @@ snapshots: jest-mock@27.5.1: dependencies: '@jest/types': 27.5.1 - '@types/node': 22.13.9 + '@types/node': 22.5.4 + + jest-mock@29.7.0: + dependencies: + '@jest/types': 29.6.3 + '@types/node': 22.5.4 + jest-util: 29.7.0 jest-pnp-resolver@1.2.3(jest-resolve@27.5.1): optionalDependencies: jest-resolve: 27.5.1 + jest-pnp-resolver@1.2.3(jest-resolve@29.7.0): + optionalDependencies: + jest-resolve: 29.7.0 + jest-regex-util@27.5.1: {} jest-regex-util@28.0.2: {} + jest-regex-util@29.6.3: {} + jest-resolve-dependencies@27.5.1: dependencies: '@jest/types': 27.5.1 @@ -32036,6 +33404,13 @@ snapshots: transitivePeerDependencies: - supports-color + jest-resolve-dependencies@29.7.0: + dependencies: + jest-regex-util: 29.6.3 + jest-snapshot: 29.7.0 + transitivePeerDependencies: + - supports-color + jest-resolve@27.5.1: dependencies: '@jest/types': 27.5.1 @@ -32049,6 +33424,18 @@ snapshots: resolve.exports: 1.1.1 slash: 3.0.0 + jest-resolve@29.7.0: + dependencies: + chalk: 4.1.2 + graceful-fs: 4.2.11 + jest-haste-map: 29.7.0 + jest-pnp-resolver: 1.2.3(jest-resolve@29.7.0) + jest-util: 29.7.0 + jest-validate: 29.7.0 + resolve: 1.22.8 + resolve.exports: 2.0.3 + slash: 3.0.0 + jest-runner@27.5.1(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(utf-8-validate@6.0.4): dependencies: '@jest/console': 27.5.1 @@ -32056,7 +33443,7 @@ snapshots: '@jest/test-result': 27.5.1 '@jest/transform': 27.5.1 '@jest/types': 27.5.1 - '@types/node': 22.13.9 + '@types/node': 22.5.4 chalk: 4.1.2 emittery: 0.8.1 graceful-fs: 4.2.11 @@ -32078,6 +33465,32 @@ snapshots: - supports-color - utf-8-validate + jest-runner@29.7.0: + dependencies: + '@jest/console': 29.7.0 + '@jest/environment': 29.7.0 + '@jest/test-result': 29.7.0 + '@jest/transform': 29.7.0 + '@jest/types': 29.6.3 + '@types/node': 22.5.4 + chalk: 4.1.2 + emittery: 0.13.1 + graceful-fs: 4.2.11 + jest-docblock: 29.7.0 + jest-environment-node: 29.7.0 + jest-haste-map: 29.7.0 + jest-leak-detector: 29.7.0 + jest-message-util: 29.7.0 + jest-resolve: 29.7.0 + jest-runtime: 29.7.0 + jest-util: 29.7.0 + jest-watcher: 29.7.0 + jest-worker: 29.7.0 + p-limit: 3.1.0 + source-map-support: 0.5.13 + transitivePeerDependencies: + - supports-color + jest-runtime@27.5.1: dependencies: '@jest/environment': 27.5.1 @@ -32105,9 +33518,36 @@ snapshots: transitivePeerDependencies: - supports-color + jest-runtime@29.7.0: + dependencies: + '@jest/environment': 29.7.0 + '@jest/fake-timers': 29.7.0 + '@jest/globals': 29.7.0 + '@jest/source-map': 29.6.3 + '@jest/test-result': 29.7.0 + '@jest/transform': 29.7.0 + '@jest/types': 29.6.3 + '@types/node': 22.5.4 + chalk: 4.1.2 + cjs-module-lexer: 1.2.3 + collect-v8-coverage: 1.0.2 + glob: 7.2.3 + graceful-fs: 4.2.11 + jest-haste-map: 29.7.0 + jest-message-util: 29.7.0 + jest-mock: 29.7.0 + jest-regex-util: 29.6.3 + jest-resolve: 29.7.0 + jest-snapshot: 29.7.0 + jest-util: 29.7.0 + slash: 3.0.0 + strip-bom: 4.0.0 + 
transitivePeerDependencies: + - supports-color + jest-serializer@27.5.1: dependencies: - '@types/node': 22.13.9 + '@types/node': 22.5.4 graceful-fs: 4.2.11 jest-snapshot@27.5.1: @@ -32137,10 +33577,35 @@ snapshots: transitivePeerDependencies: - supports-color + jest-snapshot@29.7.0: + dependencies: + '@babel/core': 7.24.0 + '@babel/generator': 7.26.2 + '@babel/plugin-syntax-jsx': 7.25.9(@babel/core@7.24.0) + '@babel/plugin-syntax-typescript': 7.23.3(@babel/core@7.24.0) + '@babel/types': 7.26.0 + '@jest/expect-utils': 29.7.0 + '@jest/transform': 29.7.0 + '@jest/types': 29.6.3 + babel-preset-current-node-syntax: 1.0.1(@babel/core@7.24.0) + chalk: 4.1.2 + expect: 29.7.0 + graceful-fs: 4.2.11 + jest-diff: 29.7.0 + jest-get-type: 29.6.3 + jest-matcher-utils: 29.7.0 + jest-message-util: 29.7.0 + jest-util: 29.7.0 + natural-compare: 1.4.0 + pretty-format: 29.7.0 + semver: 7.7.1 + transitivePeerDependencies: + - supports-color + jest-util@27.5.1: dependencies: '@jest/types': 27.5.1 - '@types/node': 22.13.9 + '@types/node': 22.5.4 chalk: 4.1.2 ci-info: 3.9.0 graceful-fs: 4.2.11 @@ -32149,7 +33614,7 @@ snapshots: jest-util@28.1.3: dependencies: '@jest/types': 28.1.3 - '@types/node': 22.13.9 + '@types/node': 22.5.4 chalk: 4.1.2 ci-info: 3.9.0 graceful-fs: 4.2.11 @@ -32158,7 +33623,7 @@ snapshots: jest-util@29.7.0: dependencies: '@jest/types': 29.6.3 - '@types/node': 22.13.9 + '@types/node': 22.5.4 chalk: 4.1.2 ci-info: 3.9.0 graceful-fs: 4.2.11 @@ -32173,11 +33638,20 @@ snapshots: leven: 3.1.0 pretty-format: 27.5.1 - jest-watch-typeahead@1.1.0(jest@27.5.1(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2))(utf-8-validate@6.0.4)): + jest-validate@29.7.0: + dependencies: + '@jest/types': 29.6.3 + camelcase: 6.3.0 + chalk: 4.1.2 + jest-get-type: 29.6.3 + leven: 3.1.0 + pretty-format: 29.7.0 + + jest-watch-typeahead@1.1.0(jest@27.5.1(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2))(utf-8-validate@6.0.4)): dependencies: ansi-escapes: 4.3.2 chalk: 4.1.2 - jest: 27.5.1(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2))(utf-8-validate@6.0.4) + jest: 27.5.1(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2))(utf-8-validate@6.0.4) jest-regex-util: 28.0.2 jest-watcher: 28.1.3 slash: 4.0.0 @@ -32188,7 +33662,7 @@ snapshots: dependencies: '@jest/test-result': 27.5.1 '@jest/types': 27.5.1 - '@types/node': 22.13.9 + '@types/node': 22.5.4 ansi-escapes: 4.3.2 chalk: 4.1.2 jest-util: 27.5.1 @@ -32198,36 +33672,54 @@ snapshots: dependencies: '@jest/test-result': 28.1.3 '@jest/types': 28.1.3 - '@types/node': 22.13.9 + '@types/node': 22.5.4 ansi-escapes: 4.3.2 chalk: 4.1.2 emittery: 0.10.2 jest-util: 28.1.3 string-length: 4.0.2 + jest-watcher@29.7.0: + dependencies: + '@jest/test-result': 29.7.0 + '@jest/types': 29.6.3 + '@types/node': 22.5.4 + ansi-escapes: 4.3.2 + chalk: 4.1.2 + emittery: 0.13.1 + jest-util: 29.7.0 + string-length: 4.0.2 + jest-worker@26.6.2: dependencies: - '@types/node': 22.13.9 + '@types/node': 22.5.4 merge-stream: 2.0.0 supports-color: 7.2.0 jest-worker@27.5.1: dependencies: - '@types/node': 22.13.9 + '@types/node': 22.5.4 merge-stream: 2.0.0 supports-color: 8.1.1 jest-worker@28.1.3: dependencies: - '@types/node': 22.13.9 + '@types/node': 22.5.4 + merge-stream: 2.0.0 + supports-color: 8.1.1 + + jest-worker@29.7.0: + 
dependencies: + '@types/node': 22.5.4 + jest-util: 29.7.0 merge-stream: 2.0.0 supports-color: 8.1.1 - jest@27.5.1(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2))(utf-8-validate@6.0.4): + jest@27.5.1(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2))(utf-8-validate@6.0.4): dependencies: - '@jest/core': 27.5.1(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2))(utf-8-validate@6.0.4) + '@jest/core': 27.5.1(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2))(utf-8-validate@6.0.4) import-local: 3.1.0 - jest-cli: 27.5.1(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2))(utf-8-validate@6.0.4) + jest-cli: 27.5.1(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2))(utf-8-validate@6.0.4) transitivePeerDependencies: - bufferutil - canvas @@ -32235,6 +33727,18 @@ snapshots: - ts-node - utf-8-validate + jest@29.7.0(@types/node@22.5.4)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)): + dependencies: + '@jest/core': 29.7.0(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)) + '@jest/types': 29.6.3 + import-local: 3.1.0 + jest-cli: 29.7.0(@types/node@22.5.4)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)) + transitivePeerDependencies: + - '@types/node' + - babel-plugin-macros + - supports-color + - ts-node + jiti@1.21.0: {} jmespath@0.16.0: {} @@ -32437,7 +33941,7 @@ snapshots: jsonpointer@5.0.1: {} - jsonrepair@3.11.2: {} + jsonrepair@3.12.0: {} jsonwebtoken@9.0.2: dependencies: @@ -32503,6 +34007,8 @@ snapshots: jwt-decode@3.1.2: {} + jwt-decode@4.0.0: {} + katex@0.16.9: dependencies: commander: 8.3.0 @@ -32511,6 +34017,10 @@ snapshots: dependencies: json-buffer: 3.0.1 + keyv@5.3.2: + dependencies: + '@keyv/serialize': 1.0.3 + kill-port@2.0.1: dependencies: get-them-args: 1.3.2 @@ -32536,7 +34046,7 @@ snapshots: kuler@2.0.0: {} - 
langchain@0.3.5(@langchain/anthropic@0.3.14(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13))(@langchain/aws@0.1.4(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/cohere@0.0.7(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/google-genai@0.2.3(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(zod@3.22.4))(@langchain/google-vertexai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(zod@3.22.4))(@langchain/groq@0.1.2(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))(@langchain/mistralai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/ollama@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(axios@1.7.9)(cheerio@1.0.0-rc.12)(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(typeorm@0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2)))(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)): + 
langchain@0.3.6(@langchain/anthropic@0.3.14(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13))(@langchain/aws@0.1.4(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/cohere@0.0.7(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/google-genai@0.2.3(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(zod@3.22.4))(@langchain/google-vertexai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(zod@3.22.4))(@langchain/groq@0.1.2(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))(@langchain/mistralai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/ollama@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(axios@1.7.9)(cheerio@1.0.0-rc.12)(encoding@0.1.13)(handlebars@4.7.8)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(typeorm@0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)))(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)): dependencies: '@langchain/core': 0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)) '@langchain/openai': 0.3.13(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)) @@ -32544,7 +34054,7 @@ snapshots: js-tiktoken: 1.0.12 js-yaml: 4.1.0 jsonpointer: 5.0.1 - langsmith: 0.2.5(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)) + langsmith: 0.2.8(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)) openapi-types: 12.1.3 p-retry: 4.6.2 uuid: 10.0.0 @@ -32562,7 +34072,8 @@ snapshots: '@langchain/ollama': 0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))) axios: 1.7.9(debug@4.3.4) cheerio: 1.0.0-rc.12 - typeorm: 0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2)) + handlebars: 4.7.8 + typeorm: 0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)) transitivePeerDependencies: - encoding - openai @@ -32574,9 +34085,9 @@ snapshots: dependencies: mustache: 4.2.0 - 
langfuse-langchain@3.3.4(langchain@0.3.5(@langchain/anthropic@0.3.14(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13))(@langchain/aws@0.1.4(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/cohere@0.0.7(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/google-genai@0.2.3(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(zod@3.22.4))(@langchain/google-vertexai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(zod@3.22.4))(@langchain/groq@0.1.2(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))(@langchain/mistralai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/ollama@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(axios@1.7.9)(cheerio@1.0.0-rc.12)(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(typeorm@0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2)))(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))): + 
langfuse-langchain@3.3.4(langchain@0.3.6(@langchain/anthropic@0.3.14(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13))(@langchain/aws@0.1.4(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/cohere@0.0.7(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/google-genai@0.2.3(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(zod@3.22.4))(@langchain/google-vertexai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(zod@3.22.4))(@langchain/groq@0.1.2(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))(@langchain/mistralai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/ollama@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(axios@1.7.9)(cheerio@1.0.0-rc.12)(encoding@0.1.13)(handlebars@4.7.8)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(typeorm@0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)))(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))): dependencies: - langchain: 
0.3.5(@langchain/anthropic@0.3.14(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13))(@langchain/aws@0.1.4(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/cohere@0.0.7(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/google-genai@0.2.3(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(zod@3.22.4))(@langchain/google-vertexai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(zod@3.22.4))(@langchain/groq@0.1.2(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))(@langchain/mistralai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/ollama@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(axios@1.7.9)(cheerio@1.0.0-rc.12)(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(typeorm@0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2)))(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)) + langchain: 
0.3.6(@langchain/anthropic@0.3.14(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13))(@langchain/aws@0.1.4(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/cohere@0.0.7(encoding@0.1.13)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(@langchain/google-genai@0.2.3(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(zod@3.22.4))(@langchain/google-vertexai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(zod@3.22.4))(@langchain/groq@0.1.2(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)))(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)))(@langchain/mistralai@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(@langchain/ollama@0.2.0(@langchain/core@0.3.37(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))))(axios@1.7.9)(cheerio@1.0.0-rc.12)(encoding@0.1.13)(handlebars@4.7.8)(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4))(typeorm@0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)))(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)) langfuse: 3.3.4 langfuse-core: 3.3.4 @@ -32592,18 +34103,7 @@ snapshots: p-retry: 4.6.2 uuid: 9.0.1 - langsmith@0.2.15(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)): - dependencies: - '@types/uuid': 10.0.0 - commander: 10.0.1 - p-queue: 6.6.2 - p-retry: 4.6.2 - semver: 7.7.1 - uuid: 10.0.0 - optionalDependencies: - openai: 4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4) - - langsmith@0.2.5(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)): + langsmith@0.2.8(openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4)): dependencies: '@types/uuid': 10.0.0 commander: 10.0.1 @@ -32982,6 +34482,8 @@ snapshots: lru-cache@7.18.3: {} + lru-cache@8.0.5: {} + lru-cache@9.1.2: {} lru.min@1.1.1: {} @@ -33324,7 +34826,7 @@ snapshots: binaryextensions: 4.19.0 commondir: 1.0.1 deep-extend: 0.6.0 - ejs: 3.1.9 + ejs: 3.1.10 globby: 11.1.0 isbinaryfile: 5.0.2 minimatch: 7.4.6 @@ -33341,24 +34843,26 @@ snapshots: vinyl: 2.2.1 vinyl-file: 3.0.0 - mem0ai@2.1.12(@anthropic-ai/sdk@0.37.0(encoding@0.1.13))(@qdrant/js-client-rest@1.9.0(typescript@5.5.2))(@supabase/supabase-js@2.39.8(bufferutil@4.0.8)(utf-8-validate@6.0.4))(@types/jest@29.5.12)(@types/pg@8.11.2)(@types/sqlite3@3.1.11)(encoding@0.1.13)(groq-sdk@0.5.0(encoding@0.1.13))(neo4j-driver@5.27.0)(ollama@0.5.11)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)): + 
mem0ai@2.1.16(@anthropic-ai/sdk@0.37.0(encoding@0.1.13))(@google/genai@0.7.0(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.4))(@mistralai/mistralai@0.1.3(encoding@0.1.13))(@qdrant/js-client-rest@1.9.0(typescript@5.5.2))(@supabase/supabase-js@2.39.8(bufferutil@4.0.8)(utf-8-validate@6.0.4))(@types/jest@29.5.14)(@types/pg@8.11.2)(@types/sqlite3@3.1.11)(encoding@0.1.13)(groq-sdk@0.5.0(encoding@0.1.13))(neo4j-driver@5.27.0)(ollama@0.5.11)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4)): dependencies: '@anthropic-ai/sdk': 0.37.0(encoding@0.1.13) + '@google/genai': 0.7.0(bufferutil@4.0.8)(encoding@0.1.13)(utf-8-validate@6.0.4) + '@mistralai/mistralai': 0.1.3(encoding@0.1.13) '@qdrant/js-client-rest': 1.9.0(typescript@5.5.2) '@supabase/supabase-js': 2.39.8(bufferutil@4.0.8)(utf-8-validate@6.0.4) - '@types/jest': 29.5.12 + '@types/jest': 29.5.14 '@types/pg': 8.11.2 '@types/sqlite3': 3.1.11 axios: 1.7.9(debug@4.3.4) groq-sdk: 0.5.0(encoding@0.1.13) neo4j-driver: 5.27.0 ollama: 0.5.11 - openai: 4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.22.4) + openai: 4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.24.2) pg: 8.11.3 redis: 4.6.13 sqlite3: 5.1.7 uuid: 9.0.1 - zod: 3.22.4 + zod: 3.24.2 transitivePeerDependencies: - debug - encoding @@ -33662,6 +35166,8 @@ snapshots: mime@1.6.0: {} + mime@2.6.0: {} + mime@3.0.0: {} mimic-fn@2.1.0: {} @@ -33899,6 +35405,17 @@ snapshots: mute-stream@0.0.8: {} + mysql2@3.10.2: + dependencies: + denque: 2.1.0 + generate-function: 2.3.1 + iconv-lite: 0.6.3 + long: 5.2.3 + lru-cache: 8.0.5 + named-placeholders: 1.1.3 + seq-queue: 0.0.5 + sqlstring: 2.3.3 + mysql2@3.11.4: dependencies: aws-ssl-profiles: 1.1.2 @@ -34076,6 +35593,8 @@ snapshots: node-releases@2.0.14: {} + nodemailer@6.9.15: {} + nodemon@2.0.22: dependencies: chokidar: 3.6.0 @@ -34279,6 +35798,10 @@ snapshots: nwsapi@2.2.7: {} + oauth@0.10.0: {} + + oauth@0.9.15: {} + object-assign@4.1.1: {} object-copy@0.1.0: @@ -34375,12 +35898,12 @@ snapshots: obuf@1.1.2: {} - oclif@3.17.2(@swc/core@1.4.6)(@types/node@22.13.9)(encoding@0.1.13)(mem-fs@2.3.0)(typescript@5.5.2): + oclif@3.17.2(@swc/core@1.4.6)(@types/node@22.5.4)(encoding@0.1.13)(mem-fs@2.3.0)(typescript@5.5.2): dependencies: - '@oclif/core': 2.15.0(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2) - '@oclif/plugin-help': 5.2.20(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2) - '@oclif/plugin-not-found': 2.4.3(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2) - '@oclif/plugin-warn-if-update-available': 2.1.1(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2) + '@oclif/core': 2.15.0(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2) + '@oclif/plugin-help': 5.2.20(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2) + '@oclif/plugin-not-found': 2.4.3(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2) + '@oclif/plugin-warn-if-update-available': 2.1.1(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2) async-retry: 1.3.3 aws-sdk: 2.1575.0 concurrently: 7.6.0 @@ -34481,6 +36004,21 @@ snapshots: transitivePeerDependencies: - encoding + openai@4.96.0(encoding@0.1.13)(ws@8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4))(zod@3.24.2): + dependencies: + '@types/node': 18.19.23 + '@types/node-fetch': 2.6.12 + abort-controller: 3.0.0 + agentkeepalive: 4.5.0 + form-data-encoder: 1.7.2 + formdata-node: 4.4.1 + node-fetch: 2.7.0(encoding@0.1.13) + optionalDependencies: + ws: 8.18.0(bufferutil@4.0.8)(utf-8-validate@6.0.4) + 
zod: 3.24.2 + transitivePeerDependencies: + - encoding + openapi-types@12.1.3: {} openapi-typescript-fetch@1.1.3: {} @@ -34748,6 +36286,67 @@ snapshots: pascalcase@0.1.1: {} + passport-auth0@1.4.4: + dependencies: + axios: 1.7.9(debug@4.3.4) + passport-oauth: 1.0.0 + passport-oauth2: 1.8.0 + transitivePeerDependencies: + - debug + + passport-cookie@1.0.9: + dependencies: + passport-strategy: 1.0.0 + + passport-github@1.1.0: + dependencies: + passport-oauth2: 1.8.0 + + passport-google-oauth20@2.0.0: + dependencies: + passport-oauth2: 1.8.0 + + passport-jwt@4.0.1: + dependencies: + jsonwebtoken: 9.0.2 + passport-strategy: 1.0.0 + + passport-local@1.0.0: + dependencies: + passport-strategy: 1.0.0 + + passport-oauth1@1.3.0: + dependencies: + oauth: 0.9.15 + passport-strategy: 1.0.0 + utils-merge: 1.0.1 + + passport-oauth2@1.8.0: + dependencies: + base64url: 3.0.1 + oauth: 0.10.0 + passport-strategy: 1.0.0 + uid2: 0.0.4 + utils-merge: 1.0.1 + + passport-oauth@1.0.0: + dependencies: + passport-oauth1: 1.3.0 + passport-oauth2: 1.8.0 + + passport-openidconnect@0.1.2: + dependencies: + oauth: 0.10.0 + passport-strategy: 1.0.0 + + passport-strategy@1.0.0: {} + + passport@0.7.0: + dependencies: + passport-strategy: 1.0.0 + pause: 0.0.1 + utils-merge: 1.0.1 + password-prompt@1.1.3: dependencies: ansi-escapes: 4.3.2 @@ -34803,6 +36402,8 @@ snapshots: dependencies: through: 2.3.8 + pause@0.0.1: {} + pdf-parse@1.1.1: dependencies: debug: 3.2.7(supports-color@5.5.0) @@ -34841,8 +36442,6 @@ snapshots: pg-connection-string@2.6.2: {} - pg-connection-string@2.6.4: {} - pg-connection-string@2.7.0: {} pg-int8@1.0.1: {} @@ -34853,7 +36452,7 @@ snapshots: dependencies: pg: 8.11.3 - pg-pool@3.6.2(pg@8.11.5): + pg-pool@3.7.1(pg@8.11.5): dependencies: pg: 8.11.5 @@ -34863,8 +36462,6 @@ snapshots: pg-protocol@1.6.0: {} - pg-protocol@1.6.1: {} - pg-protocol@1.7.1: {} pg-types@2.2.0: @@ -34899,9 +36496,9 @@ snapshots: pg@8.11.5: dependencies: - pg-connection-string: 2.6.4 - pg-pool: 3.6.2(pg@8.11.5) - pg-protocol: 1.6.1 + pg-connection-string: 2.7.0 + pg-pool: 3.7.1(pg@8.11.5) + pg-protocol: 1.7.1 pg-types: 2.2.0 pgpass: 1.0.5 optionalDependencies: @@ -35140,13 +36737,13 @@ snapshots: postcss: 8.4.35 postcss-value-parser: 4.2.0 - postcss-load-config@4.0.2(postcss@8.4.39)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2)): + postcss-load-config@4.0.2(postcss@8.4.39)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)): dependencies: lilconfig: 3.1.1 yaml: 2.4.1 optionalDependencies: postcss: 8.4.39 - ts-node: 10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2) + ts-node: 10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2) postcss-loader@6.2.1(postcss@8.4.35)(webpack@5.90.3(@swc/core@1.4.6)): dependencies: @@ -35423,7 +37020,7 @@ snapshots: dependencies: nanoid: 3.3.7 picocolors: 1.1.1 - source-map-js: 1.2.0 + source-map-js: 1.2.1 postcss@8.4.49: dependencies: @@ -35462,7 +37059,7 @@ snapshots: prebuild-install@7.1.2: dependencies: - detect-libc: 2.0.3 + detect-libc: 2.0.2 expand-template: 2.0.3 github-from-package: 0.0.0 minimist: 1.2.8 @@ -35726,7 +37323,7 @@ snapshots: '@protobufjs/path': 1.1.2 '@protobufjs/pool': 1.1.0 '@protobufjs/utf8': 1.1.0 - '@types/node': 22.13.9 + '@types/node': 22.5.4 long: 5.2.3 protoc-gen-ts@0.8.7: {} @@ -35821,6 +37418,8 @@ snapshots: pure-color@1.3.0: {} + pure-rand@6.1.0: {} + pyodide@0.25.0(bufferutil@4.0.8)(utf-8-validate@6.0.4): dependencies: base-64: 1.0.0 @@ -35843,6 +37442,10 @@ snapshots: dependencies: side-channel: 1.0.6 
+ qs@6.12.1: + dependencies: + side-channel: 1.0.6 + qs@6.13.0: dependencies: side-channel: 1.0.6 @@ -35874,6 +37477,8 @@ snapshots: fs-promise: 2.0.3 lodash: 4.17.21 + random-bytes@1.0.0: {} + randombytes@2.1.0: dependencies: safe-buffer: 5.2.1 @@ -36117,10 +37722,10 @@ snapshots: history: 5.3.0 react: 18.2.0 - react-scripts@5.0.1(@babel/plugin-syntax-flow@7.23.3(@babel/core@7.24.0))(@babel/plugin-transform-react-jsx@7.25.9(@babel/core@7.24.0))(@swc/core@1.4.6)(@types/babel__core@7.20.5)(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(eslint@8.57.0)(react@18.2.0)(sass@1.71.1)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2))(type-fest@4.12.0)(typescript@5.5.2)(utf-8-validate@6.0.4): + react-scripts@5.0.1(@babel/plugin-syntax-flow@7.23.3(@babel/core@7.24.0))(@babel/plugin-transform-react-jsx@7.25.9(@babel/core@7.24.0))(@swc/core@1.4.6)(@types/babel__core@7.20.5)(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(eslint@8.57.0)(react@18.2.0)(sass@1.71.1)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2))(type-fest@4.40.1)(typescript@5.5.2)(utf-8-validate@6.0.4): dependencies: '@babel/core': 7.24.0 - '@pmmmwh/react-refresh-webpack-plugin': 0.5.11(react-refresh@0.11.0)(type-fest@4.12.0)(webpack-dev-server@4.15.1(bufferutil@4.0.8)(utf-8-validate@6.0.4)(webpack@5.90.3(@swc/core@1.4.6)))(webpack@5.90.3(@swc/core@1.4.6)) + '@pmmmwh/react-refresh-webpack-plugin': 0.5.11(react-refresh@0.11.0)(type-fest@4.40.1)(webpack-dev-server@4.15.1(bufferutil@4.0.8)(utf-8-validate@6.0.4)(webpack@5.90.3(@swc/core@1.4.6)))(webpack@5.90.3(@swc/core@1.4.6)) '@svgr/webpack': 5.5.0 babel-jest: 27.5.1(@babel/core@7.24.0) babel-loader: 8.3.0(@babel/core@7.24.0)(webpack@5.90.3(@swc/core@1.4.6)) @@ -36135,15 +37740,15 @@ snapshots: dotenv: 10.0.0 dotenv-expand: 5.1.0 eslint: 8.57.0 - eslint-config-react-app: 7.0.1(@babel/plugin-syntax-flow@7.23.3(@babel/core@7.24.0))(@babel/plugin-transform-react-jsx@7.25.9(@babel/core@7.24.0))(eslint@8.57.0)(jest@27.5.1(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2))(utf-8-validate@6.0.4))(typescript@5.5.2) + eslint-config-react-app: 7.0.1(@babel/plugin-syntax-flow@7.23.3(@babel/core@7.24.0))(@babel/plugin-transform-react-jsx@7.25.9(@babel/core@7.24.0))(eslint@8.57.0)(jest@27.5.1(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2))(utf-8-validate@6.0.4))(typescript@5.5.2) eslint-webpack-plugin: 3.2.0(eslint@8.57.0)(webpack@5.90.3(@swc/core@1.4.6)) file-loader: 6.2.0(webpack@5.90.3(@swc/core@1.4.6)) fs-extra: 10.1.0 html-webpack-plugin: 5.6.0(webpack@5.90.3(@swc/core@1.4.6)) identity-obj-proxy: 3.0.0 - jest: 27.5.1(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2))(utf-8-validate@6.0.4) + jest: 27.5.1(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2))(utf-8-validate@6.0.4) jest-resolve: 27.5.1 - jest-watch-typeahead: 1.1.0(jest@27.5.1(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2))(utf-8-validate@6.0.4)) + jest-watch-typeahead: 1.1.0(jest@27.5.1(bufferutil@4.0.8)(canvas@2.11.2(encoding@0.1.13))(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2))(utf-8-validate@6.0.4)) mini-css-extract-plugin: 2.8.1(webpack@5.90.3(@swc/core@1.4.6)) postcss: 8.4.35 postcss-flexbugs-fixes: 
5.0.2(postcss@8.4.35) @@ -36161,7 +37766,7 @@ snapshots: semver: 7.7.1 source-map-loader: 3.0.2(webpack@5.90.3(@swc/core@1.4.6)) style-loader: 3.3.4(webpack@5.90.3(@swc/core@1.4.6)) - tailwindcss: 3.4.1(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2)) + tailwindcss: 3.4.1(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)) terser-webpack-plugin: 5.3.10(@swc/core@1.4.6)(webpack@5.90.3(@swc/core@1.4.6)) webpack: 5.90.3(@swc/core@1.4.6) webpack-dev-server: 4.15.1(bufferutil@4.0.8)(utf-8-validate@6.0.4)(webpack@5.90.3(@swc/core@1.4.6)) @@ -36203,6 +37808,14 @@ snapshots: - webpack-hot-middleware - webpack-plugin-serve + react-smooth@4.0.1(react-dom@18.2.0(react@18.2.0))(react@18.2.0): + dependencies: + fast-equals: 5.0.1 + prop-types: 15.8.1 + react: 18.2.0 + react-dom: 18.2.0(react@18.2.0) + react-transition-group: 4.4.5(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + react-syntax-highlighter@15.5.0(react@18.2.0): dependencies: '@babel/runtime': 7.24.0 @@ -36214,7 +37827,7 @@ snapshots: react-textarea-autosize@8.5.3(@types/react@18.2.65)(react@18.2.0): dependencies: - '@babel/runtime': 7.26.9 + '@babel/runtime': 7.26.10 react: 18.2.0 use-composed-ref: 1.3.0(react@18.2.0) use-latest: 1.2.1(@types/react@18.2.65)(react@18.2.0) @@ -36223,7 +37836,7 @@ snapshots: react-transition-group@4.4.5(react-dom@18.2.0(react@18.2.0))(react@18.2.0): dependencies: - '@babel/runtime': 7.26.9 + '@babel/runtime': 7.26.10 dom-helpers: 5.2.1 loose-envify: 1.4.0 prop-types: 15.8.1 @@ -36239,14 +37852,14 @@ snapshots: lodash: 4.17.21 react: 18.2.0 - reactflow@11.10.4(@types/react@18.2.65)(immer@9.0.21)(react-dom@18.2.0(react@18.2.0))(react@18.2.0): + reactflow@11.10.4(@types/react@18.2.65)(immer@10.1.1)(react-dom@18.2.0(react@18.2.0))(react@18.2.0): dependencies: - '@reactflow/background': 11.3.9(@types/react@18.2.65)(immer@9.0.21)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@reactflow/controls': 11.2.9(@types/react@18.2.65)(immer@9.0.21)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@reactflow/core': 11.10.4(@types/react@18.2.65)(immer@9.0.21)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@reactflow/minimap': 11.7.9(@types/react@18.2.65)(immer@9.0.21)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@reactflow/node-resizer': 2.2.9(@types/react@18.2.65)(immer@9.0.21)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) - '@reactflow/node-toolbar': 1.3.9(@types/react@18.2.65)(immer@9.0.21)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@reactflow/background': 11.3.9(@types/react@18.2.65)(immer@10.1.1)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@reactflow/controls': 11.2.9(@types/react@18.2.65)(immer@10.1.1)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@reactflow/core': 11.10.4(@types/react@18.2.65)(immer@10.1.1)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@reactflow/minimap': 11.7.9(@types/react@18.2.65)(immer@10.1.1)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@reactflow/node-resizer': 2.2.9(@types/react@18.2.65)(immer@10.1.1)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + '@reactflow/node-toolbar': 1.3.9(@types/react@18.2.65)(immer@10.1.1)(react-dom@18.2.0(react@18.2.0))(react@18.2.0) react: 18.2.0 react-dom: 18.2.0(react@18.2.0) transitivePeerDependencies: @@ -36347,6 +37960,23 @@ snapshots: dependencies: picomatch: 2.3.1 + recharts-scale@0.4.5: + dependencies: + decimal.js-light: 2.5.1 + + recharts@2.12.7(react-dom@18.2.0(react@18.2.0))(react@18.2.0): + dependencies: + clsx: 2.1.0 + eventemitter3: 4.0.7 + lodash: 4.17.21 + react: 18.2.0 + 
react-dom: 18.2.0(react@18.2.0) + react-is: 16.13.1 + react-smooth: 4.0.1(react-dom@18.2.0(react@18.2.0))(react@18.2.0) + recharts-scale: 0.4.5 + tiny-invariant: 1.3.3 + victory-vendor: 36.9.2 + rechoir@0.6.2: dependencies: resolve: 1.22.8 @@ -36383,10 +38013,25 @@ snapshots: '@redis/search': 1.1.6(@redis/client@1.5.14) '@redis/time-series': 1.0.5(@redis/client@1.5.14) + redis@4.7.0: + dependencies: + '@redis/bloom': 1.2.0(@redis/client@1.6.0) + '@redis/client': 1.6.0 + '@redis/graph': 1.1.1(@redis/client@1.6.0) + '@redis/json': 1.0.7(@redis/client@1.6.0) + '@redis/search': 1.2.0(@redis/client@1.6.0) + '@redis/time-series': 1.1.0(@redis/client@1.6.0) + + redux-thunk@3.1.0(redux@5.0.1): + dependencies: + redux: 5.0.1 + redux@4.2.1: dependencies: '@babel/runtime': 7.24.0 + redux@5.0.1: {} + reflect-metadata@0.1.14: {} reflect-metadata@0.2.1: {} @@ -36421,7 +38066,7 @@ snapshots: regenerator-transform@0.15.2: dependencies: - '@babel/runtime': 7.26.9 + '@babel/runtime': 7.26.10 regex-not@1.0.2: dependencies: @@ -36566,6 +38211,8 @@ snapshots: reselect@4.1.8: {} + reselect@5.1.1: {} + resolve-alpn@1.2.1: {} resolve-cwd@3.0.0: @@ -36597,6 +38244,8 @@ snapshots: resolve.exports@1.1.1: {} + resolve.exports@2.0.3: {} + resolve@1.22.8: dependencies: is-core-module: 2.13.1 @@ -37250,6 +38899,11 @@ snapshots: dependencies: source-map: 0.5.7 + source-map-support@0.5.13: + dependencies: + buffer-from: 1.1.2 + source-map: 0.6.1 + source-map-support@0.5.21: dependencies: buffer-from: 1.1.2 @@ -37599,9 +39253,14 @@ snapshots: strip-json-comments@3.1.1: {} + stripe@15.12.0: + dependencies: + '@types/node': 22.5.4 + qs: 6.12.1 + stripe@17.3.1: dependencies: - '@types/node': 20.12.12 + '@types/node': 22.5.4 qs: 6.13.0 strnum@1.0.5: {} @@ -37670,6 +39329,42 @@ snapshots: pirates: 4.0.6 ts-interface-checker: 0.1.13 + superagent@9.0.2: + dependencies: + component-emitter: 1.3.1 + cookiejar: 2.1.4 + debug: 4.4.0(supports-color@8.1.1) + fast-safe-stringify: 2.1.1 + form-data: 4.0.1 + formidable: 3.5.4 + methods: 1.1.2 + mime: 2.6.0 + qs: 6.13.0 + transitivePeerDependencies: + - supports-color + + supergateway@3.0.1(bufferutil@4.0.8)(utf-8-validate@6.0.4): + dependencies: + '@modelcontextprotocol/sdk': 1.12.0 + body-parser: 2.0.2 + cors: 2.8.5 + express: 4.21.2 + uuid: 11.1.0 + ws: 8.18.2(bufferutil@4.0.8)(utf-8-validate@6.0.4) + yargs: 17.7.2 + zod: 3.25.32 + transitivePeerDependencies: + - bufferutil + - supports-color + - utf-8-validate + + supertest@7.1.0: + dependencies: + methods: 1.1.2 + superagent: 9.0.2 + transitivePeerDependencies: + - supports-color + supports-color@2.0.0: {} supports-color@5.5.0: @@ -37778,7 +39473,7 @@ snapshots: symbol-tree@3.2.4: {} - tailwindcss@3.4.1(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2)): + tailwindcss@3.4.1(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)): dependencies: '@alloc/quick-lru': 5.2.0 arg: 5.0.2 @@ -37797,7 +39492,7 @@ snapshots: postcss: 8.4.39 postcss-import: 15.1.0(postcss@8.4.39) postcss-js: 4.0.1(postcss@8.4.39) - postcss-load-config: 4.0.2(postcss@8.4.39)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2)) + postcss-load-config: 4.0.2(postcss@8.4.39)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)) postcss-nested: 6.0.1(postcss@8.4.39) postcss-selector-parser: 6.0.15 resolve: 1.22.8 @@ -38073,14 +39768,34 @@ snapshots: ts-interface-checker@0.1.13: {} - ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2): + 
ts-jest@29.3.2(@babel/core@7.24.0)(@jest/transform@29.7.0)(@jest/types@29.6.3)(babel-jest@29.7.0(@babel/core@7.24.0))(jest@29.7.0(@types/node@22.5.4)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)))(typescript@5.5.2): + dependencies: + bs-logger: 0.2.6 + ejs: 3.1.10 + fast-json-stable-stringify: 2.1.0 + jest: 29.7.0(@types/node@22.5.4)(babel-plugin-macros@3.1.0)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)) + jest-util: 29.7.0 + json5: 2.2.3 + lodash.memoize: 4.1.2 + make-error: 1.3.6 + semver: 7.7.1 + type-fest: 4.40.1 + typescript: 5.5.2 + yargs-parser: 21.1.1 + optionalDependencies: + '@babel/core': 7.24.0 + '@jest/transform': 29.7.0 + '@jest/types': 29.6.3 + babel-jest: 29.7.0(@babel/core@7.24.0) + + ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2): dependencies: '@cspotcode/source-map-support': 0.8.1 '@tsconfig/node10': 1.0.9 '@tsconfig/node12': 1.0.11 '@tsconfig/node14': 1.0.3 '@tsconfig/node16': 1.0.4 - '@types/node': 22.13.9 + '@types/node': 22.5.4 acorn: 8.11.3 acorn-walk: 8.3.2 arg: 4.1.3 @@ -38097,7 +39812,7 @@ snapshots: ts-type@3.0.1(ts-toolbelt@9.6.0): dependencies: - '@types/node': 22.13.9 + '@types/node': 22.5.4 ts-toolbelt: 9.6.0 tslib: 2.6.2 typedarray-dts: 1.0.0 @@ -38203,6 +39918,8 @@ snapshots: type-fest@4.12.0: {} + type-fest@4.40.1: {} + type-is@1.6.18: dependencies: media-typer: 0.3.0 @@ -38260,7 +39977,7 @@ snapshots: typedarray@0.0.6: {} - typeorm@0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2)): + typeorm@0.3.20(ioredis@5.3.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)): dependencies: '@sqltools/formatter': 1.2.5 app-root-path: 3.1.0 @@ -38284,11 +40001,11 @@ snapshots: pg: 8.11.3 redis: 4.6.13 sqlite3: 5.1.7 - ts-node: 10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2) + ts-node: 10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2) transitivePeerDependencies: - supports-color - typeorm@0.3.20(ioredis@5.4.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.6.13)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2)): + typeorm@0.3.20(ioredis@5.4.2)(mongodb@6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1))(mysql2@3.11.4)(pg@8.11.3)(redis@4.7.0)(sqlite3@5.1.7)(ts-node@10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2)): dependencies: '@sqltools/formatter': 1.2.5 app-root-path: 3.1.0 @@ -38310,12 +40027,14 @@ snapshots: mongodb: 6.3.0(gcp-metadata@6.1.0(encoding@0.1.13))(socks@2.8.1) mysql2: 3.11.4 pg: 8.11.3 - redis: 4.6.13 + redis: 4.7.0 sqlite3: 5.1.7 - ts-node: 10.9.2(@swc/core@1.4.6)(@types/node@22.13.9)(typescript@5.5.2) + ts-node: 10.9.2(@swc/core@1.4.6)(@types/node@22.5.4)(typescript@5.5.2) transitivePeerDependencies: - supports-color + typescript-event-target@1.1.1: {} + typescript@5.5.2: {} ua-parser-js@0.7.37: {} @@ -38324,6 +40043,15 @@ snapshots: uc.micro@2.1.0: {} + uglify-js@3.19.3: + optional: true + + uid-safe@2.1.5: + dependencies: + random-bytes: 1.0.0 + + uid2@0.0.4: {} + unbox-primitive@1.0.2: dependencies: call-bind: 1.0.7 @@ -38368,7 +40096,7 @@ snapshots: undici-types@5.26.5: {} - undici-types@6.20.0: {} + 
undici-types@6.19.8: {} undici@5.28.3: dependencies: @@ -38630,6 +40358,12 @@ snapshots: convert-source-map: 1.9.0 source-map: 0.7.4 + v8-to-istanbul@9.3.0: + dependencies: + '@jridgewell/trace-mapping': 0.3.25 + '@types/istanbul-lib-coverage': 2.0.6 + convert-source-map: 2.0.0 + v8flags@3.2.0: dependencies: homedir-polyfill: 1.0.3 @@ -38689,6 +40423,23 @@ snapshots: unist-util-stringify-position: 4.0.0 vfile-message: 4.0.2 + victory-vendor@36.9.2: + dependencies: + '@types/d3-array': 3.2.1 + '@types/d3-ease': 3.0.2 + '@types/d3-interpolate': 3.0.4 + '@types/d3-scale': 4.0.8 + '@types/d3-shape': 3.1.6 + '@types/d3-time': 3.0.3 + '@types/d3-timer': 3.0.2 + d3-array: 3.2.4 + d3-ease: 3.0.1 + d3-interpolate: 3.0.1 + d3-scale: 4.0.2 + d3-shape: 3.2.0 + d3-time: 3.1.0 + d3-timer: 3.0.1 + vinyl-file@3.0.0: dependencies: graceful-fs: 4.2.11 @@ -38736,12 +40487,12 @@ snapshots: remove-trailing-separator: 1.1.0 replace-ext: 1.0.1 - vite-plugin-pwa@0.17.5(vite@5.1.6(@types/node@22.13.9)(sass@1.71.1)(terser@5.29.1))(workbox-build@7.0.0(@types/babel__core@7.20.5))(workbox-window@7.0.0): + vite-plugin-pwa@0.17.5(vite@5.1.6(@types/node@22.5.4)(sass@1.71.1)(terser@5.29.1))(workbox-build@7.0.0(@types/babel__core@7.20.5))(workbox-window@7.0.0): dependencies: debug: 4.3.4(supports-color@8.1.1) fast-glob: 3.3.2 pretty-bytes: 6.1.1 - vite: 5.1.6(@types/node@22.13.9)(sass@1.71.1)(terser@5.29.1) + vite: 5.1.6(@types/node@22.5.4)(sass@1.71.1)(terser@5.29.1) workbox-build: 7.0.0(@types/babel__core@7.20.5) workbox-window: 7.0.0 transitivePeerDependencies: @@ -38754,35 +40505,35 @@ snapshots: transitivePeerDependencies: - supports-color - vite-tsconfig-paths@4.3.1(typescript@5.5.2)(vite@4.5.2(@types/node@22.13.9)(sass@1.71.1)(terser@5.29.1)): + vite-tsconfig-paths@4.3.1(typescript@5.5.2)(vite@4.5.2(@types/node@22.5.4)(sass@1.71.1)(terser@5.29.1)): dependencies: debug: 4.4.0(supports-color@8.1.1) globrex: 0.1.2 tsconfck: 3.0.3(typescript@5.5.2) optionalDependencies: - vite: 4.5.2(@types/node@22.13.9)(sass@1.71.1)(terser@5.29.1) + vite: 4.5.2(@types/node@22.5.4)(sass@1.71.1)(terser@5.29.1) transitivePeerDependencies: - supports-color - typescript - vite@4.5.2(@types/node@22.13.9)(sass@1.71.1)(terser@5.29.1): + vite@4.5.2(@types/node@22.5.4)(sass@1.71.1)(terser@5.29.1): dependencies: esbuild: 0.18.20 postcss: 8.4.49 rollup: 3.29.4 optionalDependencies: - '@types/node': 22.13.9 + '@types/node': 22.5.4 fsevents: 2.3.3 sass: 1.71.1 terser: 5.29.1 - vite@5.1.6(@types/node@22.13.9)(sass@1.71.1)(terser@5.29.1): + vite@5.1.6(@types/node@22.5.4)(sass@1.71.1)(terser@5.29.1): dependencies: esbuild: 0.19.12 postcss: 8.4.35 rollup: 4.13.0 optionalDependencies: - '@types/node': 22.13.9 + '@types/node': 22.5.4 fsevents: 2.3.3 sass: 1.71.1 terser: 5.29.1 @@ -39114,6 +40865,14 @@ snapshots: wink-nlp@2.3.0: {} + winston-daily-rotate-file@5.0.0(winston@3.12.0): + dependencies: + file-stream-rotator: 0.6.1 + object-hash: 3.0.0 + triple-beam: 1.4.1 + winston: 3.12.0 + winston-transport: 4.7.0 + winston-transport@4.7.0: dependencies: logform: 2.6.0 @@ -39161,7 +40920,7 @@ snapshots: '@apideck/better-ajv-errors': 0.3.6(ajv@8.13.0) '@babel/core': 7.24.0 '@babel/preset-env': 7.24.5(@babel/core@7.24.0) - '@babel/runtime': 7.26.9 + '@babel/runtime': 7.26.10 '@rollup/plugin-babel': 5.3.1(@babel/core@7.24.0)(@types/babel__core@7.20.5)(rollup@2.79.1) '@rollup/plugin-node-resolve': 11.2.1(rollup@2.79.1) '@rollup/plugin-replace': 2.4.2(rollup@2.79.1) @@ -39204,7 +40963,7 @@ snapshots: '@apideck/better-ajv-errors': 0.3.6(ajv@8.13.0) '@babel/core': 
7.24.0 '@babel/preset-env': 7.24.5(@babel/core@7.24.0) - '@babel/runtime': 7.26.9 + '@babel/runtime': 7.26.10 '@rollup/plugin-babel': 5.3.1(@babel/core@7.24.0)(@types/babel__core@7.20.5)(rollup@2.79.1) '@rollup/plugin-node-resolve': 11.2.1(rollup@2.79.1) '@rollup/plugin-replace': 2.4.2(rollup@2.79.1) @@ -39428,6 +41187,11 @@ snapshots: bufferutil: 4.0.8 utf-8-validate: 6.0.4 + ws@8.18.2(bufferutil@4.0.8)(utf-8-validate@6.0.4): + optionalDependencies: + bufferutil: 4.0.8 + utf-8-validate: 6.0.4 + xdg-default-browser@2.1.0: dependencies: execa: 0.2.2 @@ -39439,7 +41203,7 @@ snapshots: xml2js@0.6.2: dependencies: - sax: 1.2.1 + sax: 1.2.4 xmlbuilder: 11.0.1 xmlbuilder@10.1.1: {} @@ -39605,7 +41369,7 @@ snapshots: yup@0.32.11: dependencies: '@babel/runtime': 7.24.0 - '@types/lodash': 4.14.202 + '@types/lodash': 4.17.4 lodash: 4.17.21 lodash-es: 4.17.21 nanoclone: 0.2.1 @@ -39648,6 +41412,10 @@ snapshots: dependencies: zod: 3.24.2 + zod-to-json-schema@3.24.1(zod@3.25.32): + dependencies: + zod: 3.25.32 + zod-validation-error@3.3.0(zod@3.22.4): dependencies: zod: 3.22.4 @@ -39658,12 +41426,14 @@ snapshots: zod@3.24.2: {} - zustand@4.5.2(@types/react@18.2.65)(immer@9.0.21)(react@18.2.0): + zod@3.25.32: {} + + zustand@4.5.2(@types/react@18.2.65)(immer@10.1.1)(react@18.2.0): dependencies: use-sync-external-store: 1.2.0(react@18.2.0) optionalDependencies: '@types/react': 18.2.65 - immer: 9.0.21 + immer: 10.1.1 react: 18.2.0 zwitch@2.0.4: {}